[ns_server:info,2014-08-19T15:37:51.645,nonode@nohost:<0.58.0>:ns_server:init_logging:248]Started & configured logging [ns_server:info,2014-08-19T15:37:51.648,nonode@nohost:<0.58.0>:ns_server:log_pending:30]Static config terms: [{error_logger_mf_dir,"/opt/couchbase/var/lib/couchbase/logs"}, {error_logger_mf_maxbytes,10485760}, {error_logger_mf_maxfiles,20}, {path_config_bindir,"/opt/couchbase/bin"}, {path_config_etcdir,"/opt/couchbase/etc/couchbase"}, {path_config_libdir,"/opt/couchbase/lib"}, {path_config_datadir,"/opt/couchbase/var/lib/couchbase"}, {path_config_tmpdir,"/opt/couchbase/var/lib/couchbase/tmp"}, {nodefile,"/opt/couchbase/var/lib/couchbase/couchbase-server.node"}, {loglevel_default,debug}, {loglevel_couchdb,info}, {loglevel_ns_server,debug}, {loglevel_error_logger,debug}, {loglevel_user,debug}, {loglevel_menelaus,debug}, {loglevel_ns_doctor,debug}, {loglevel_stats,debug}, {loglevel_rebalance,debug}, {loglevel_cluster,debug}, {loglevel_views,debug}, {loglevel_mapreduce_errors,debug}, {loglevel_xdcr,debug}] [ns_server:info,2014-08-19T15:37:51.814,nonode@nohost:<0.58.0>:ns_server:start:58]Locked myself into a memory successfully. [error_logger:info,2014-08-19T15:37:51.869,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,crypto_sup} started: [{pid,<0.167.0>}, {name,crypto_server}, {mfargs,{crypto_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:51.869,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: crypto started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:51.884,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: asn1 started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:51.891,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: public_key started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:51.903,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.174.0>}, {name,ftp_sup}, {mfargs,{ftp_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:51.930,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,httpc_profile_sup} started: [{pid,<0.177.0>}, {name,httpc_manager}, {mfargs, {httpc_manager,start_link, [default,only_session_cookies,inets]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:51.931,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,httpc_sup} started: [{pid,<0.176.0>}, {name,httpc_profile_sup}, {mfargs, {httpc_profile_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:51.934,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] 
=========================PROGRESS REPORT========================= supervisor: {local,httpc_sup} started: [{pid,<0.178.0>}, {name,httpc_handler_sup}, {mfargs,{httpc_handler_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:51.934,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.175.0>}, {name,httpc_sup}, {mfargs, {httpc_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:51.938,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.179.0>}, {name,httpd_sup}, {mfargs,{httpd_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:51.941,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.180.0>}, {name,tftp_sup}, {mfargs,{tftp_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:51.941,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: inets started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:51.941,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: oauth started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:51.953,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.186.0>}, {name,ssl_broker_sup}, {mfargs,{ssl_broker_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:51.959,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.187.0>}, {name,ssl_manager}, {mfargs,{ssl_manager,start_link,[[]]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:51.961,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.188.0>}, {name,ssl_connection}, {mfargs,{ssl_connection_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:51.961,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: ssl started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:52.113,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.195.0>}, {name,ssl_server}, {mfargs,{ssl_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, 
{child_type,worker}] [error_logger:info,2014-08-19T15:37:52.114,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,lhttpc_sup} started: [{pid,<0.193.0>}, {name,lhttpc_manager}, {mfargs, {lhttpc_manager,start_link, [[{name,lhttpc_manager}]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.114,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: lhttpc started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:52.117,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: xmerl started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:52.127,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: compiler started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:52.131,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: syntax_tools started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:52.131,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: mochiweb started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:52.133,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_view_parser started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:52.136,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_set_view started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:52.138,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_index_merger started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:52.140,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: mapreduce started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:52.173,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.204.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/default.d/capi.ini", "/opt/couchbase/etc/couchdb/default.d/geocouch.ini", "/opt/couchbase/etc/couchdb/local.ini"], <0.204.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.194,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.207.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] 
[error_logger:info,2014-08-19T15:37:52.195,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.208.0>}, {name,couch_task_events}, {mfargs, {gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.197,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.209.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.198,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.210.0>}, {name,couch_file_write_guard}, {mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.206,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.211.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.207,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.212.0>}, {name,couch_db_update_event}, {mfargs, {gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.207,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.213.0>}, {name,couch_replication_event}, {mfargs, {gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.208,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.214.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:52.210,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.215.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.214,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.216.0>}, {name,couch_main_index_barrier}, {mfargs, {couch_index_barrier,start_link, 
[couch_main_index_barrier, "max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.214,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.217.0>}, {name,couch_replica_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_replica_index_barrier, "max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.214,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.218.0>}, {name,couch_spatial_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_spatial_index_barrier, "max_parallel_spatial_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.214,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.206.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:52.218,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.220.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:52.302,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.221.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.311,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.232.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.314,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.235.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.314,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.237.0>}, {name,index_merger_pool}, {mfargs, {lhttpc_manager,start_link, [[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
[error_logger:info,2014-08-19T15:37:52.318,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.238.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.321,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.240.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.324,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.242.0>}, {name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.339,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.244.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.339,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.261.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.339,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.219.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:52.339,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,cb_couch_sup} started: [{pid,<0.205.0>}, {name,couch_app}, {mfargs, {couch_app,start, [fake, ["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:52.339,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.162.0>}, {name,cb_couch_sup}, {mfargs,{cb_couch_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,supervisor}] [ns_server:info,2014-08-19T15:37:52.348,nonode@nohost:ns_server_cluster_sup<0.161.0>:log_os_info:start_link:25]OS type: {unix,linux} Version: {2,6,32} Runtime info: [{otp_release,"R14B04"}, {erl_version,"5.8.5"}, {erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:24:24] [rq:24] [async-threads:16] [kernel-poll:true]\n"}, 
{system_arch_raw,"x86_64-unknown-linux-gnu"}, {system_arch,"x86_64-unknown-linux-gnu"}, {localtime,{{2014,8,19},{15,37,52}}}, {memory, [{total,560927296}, {processes,5655168}, {processes_used,5644744}, {system,555272128}, {atom,833185}, {atom_used,825581}, {binary,49584}, {code,7877161}, {ets,648928}]}, {loaded, [ns_info,log_os_info,couch_config_writer,cb_init_loggers, mochiweb_acceptor,inet_tcp,gen_tcp,mochiweb_socket, mochiweb_socket_server,mochilists,mochiweb_http,eval_bits, couch_httpd,couch_view,couch_set_view_ddoc_cache, couch_query_servers,couch_spatial,mapreduce, couch_set_view,snappy,couch_compress, couch_spatial_validation,couch_set_view_mapreduce,ejson, couch_doc,couch_db_update_notifier,couch_btree, couch_ref_counter,couch_uuids,couch_db_updater,couch_db, couch_auth_cache,couch_db_update_notifier_sup, couch_secondary_sup,queue,couch_index_barrier, couch_event_sup,couch_log,couch_rep_sup,httpd_util, filelib,couch_file,couch_file_write_guard, couch_task_status,erl_ddll,couch_drv,couch_primary_sup, couch_server,string,re,file2,couch_util,couch_config, couch_server_sup,ssl_server,crypto,ssl,lhttpc_manager, lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, mlockall,calendar,ale_default_formatter,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views',timer,io_lib_fread, 'ale_logger-cluster','ale_logger-rebalance', 'ale_logger-stats','ale_logger-ns_doctor', 'ale_logger-menelaus','ale_logger-user', 'ale_logger-ns_server','ale_logger-couchdb',ns_log_sink, disk_log_sup,disk_log_server,disk_log_1,disk_log, ale_disk_sink,ns_server,cpu_sup,memsup,disksup,os_mon,io, release_handler,overload,alarm_handler,log_mf_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal,sets, ordsets,erl_lint,compile,dynamic_compile,ale_utils, io_lib_pretty,io_lib_format,io_lib,ale_codegen,dict,ale, ale_dynamic_sup,ale_sup,ale_app,ns_bootstrap,child_erlang, file_io_server,orddict,erl_eval,file,c,kernel_config, user_sup,supervisor_bridge,standard_error,unicode,binary, ets,gb_sets,hipe_unified_loader,packages,code_server,code, file_server,net_kernel,global_group,erl_distribution, filename,inet_gethost_native,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{public_key,"Public key infrastructure","0.13"}, {asn1,"The Erlang ASN1 compiler version 1.6.18","1.6.18"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8ca6d2a"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set views","1.2.0a-a425d97-git"}, {compiler,"ERTS CXC 138 10","4.7.5"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache 
CouchDB","1.2.0a-a425d97-git"}, {mapreduce,"MapReduce using V8 JavaScript engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-a425d97-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.5.1-1083-rel-enterprise"}, {mochiweb,"MochiMedia Web Server","2.4.2"}, {syntax_tools,"Syntax tools","1.6.7.1"}, {xmerl,"XML parser","1.2.10"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,147}, {node,nonode@nohost}, {nodes,[]}, {registered, [kernel_safe_sup,couch_db_update_notifier_sup, couch_auth_cache,couch_rep_sup,os_mon_sup,couch_view, cpu_sup,couch_server_sup,memsup,disksup, couch_query_servers,ns_server_cluster_sup, couch_task_status,couch_log,httpd_sup,couch_httpd, couch_drv,ssl_connection_sup,couch_file_write_guard, couch_set_view_ddoc_cache,cb_couch_sup,ssl_manager, error_logger,couch_index_merger_connection_pool, sasl_safe_sup,'sink-ns_log','sink-disk_stats',ale_sup, couch_spatial,standard_error,'sink-disk_xdcr_errors', 'sink-disk_xdcr','sink-disk_debug',standard_error_sup, ale_dynamic_sup,'sink-disk_couchdb', 'sink-disk_mapreduce_errors','sink-disk_views', ssl_broker_sup,'sink-disk_error',ssl_server,timer_server, ssl_sup,ale,httpc_sup,httpc_profile_sup,httpc_manager, httpc_handler_sup,erl_prim_loader,inet_db,ftp_sup, sasl_sup,couch_spatial_index_barrier,rex, couch_replica_index_barrier,kernel_sup, couch_main_index_barrier,global_name_server,inets_sup, lhttpc_sup,couch_replication,crypto_server,file_server_2, crypto_sup,global_group,couch_task_events, couch_secondary_services,couch_primary_services, release_handler,couch_db_update,init,overload, couch_config,alarm_handler,couch_set_view,disk_log_sup, disk_log_server,couch_server,code_server,couch_uuids, application_controller,lhttpc_manager,tftp_sup, 'sink-disk_default']}, {cookie,nocookie}, {wordsize,8}, {wall_clock,1}] [ns_server:info,2014-08-19T15:37:52.354,nonode@nohost:ns_server_cluster_sup<0.161.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "," "] [error_logger:info,2014-08-19T15:37:52.355,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.263.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:52.357,nonode@nohost:dist_manager<0.264.0>:dist_manager:read_address_config_from_path:83]Reading ip config from "/opt/couchbase/var/lib/couchbase/ip_start" [ns_server:info,2014-08-19T15:37:52.357,nonode@nohost:dist_manager<0.264.0>:dist_manager:read_address_config_from_path:83]Reading ip config from "/opt/couchbase/var/lib/couchbase/ip" [ns_server:info,2014-08-19T15:37:52.357,nonode@nohost:dist_manager<0.264.0>:dist_manager:init:159]ip config not found. 
Looks like we're brand new node [error_logger:info,2014-08-19T15:37:52.358,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inet_gethost_native_sup} started: [{pid,<0.266.0>},{mfa,{inet_gethost_native,init,[[]]}}] [error_logger:info,2014-08-19T15:37:52.358,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.265.0>}, {name,inet_gethost_native_sup}, {mfargs,{inet_gethost_native,start_link,[]}}, {restart_type,temporary}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:52.488,nonode@nohost:dist_manager<0.264.0>:dist_manager:bringup:230]Attempting to bring up net_kernel with name 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:37:52.491,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.268.0>}, {name,erl_epmd}, {mfargs,{erl_epmd,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.491,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.269.0>}, {name,auth}, {mfargs,{auth,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:52.492,ns_1@127.0.0.1:dist_manager<0.264.0>:dist_manager:save_node:143]saving node to "/opt/couchbase/var/lib/couchbase/couchbase-server.node" [error_logger:info,2014-08-19T15:37:52.492,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.270.0>}, {name,net_kernel}, {mfargs, {net_kernel,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.492,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_sup} started: [{pid,<0.267.0>}, {name,net_sup_dynamic}, {mfargs, {erl_distribution,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [ns_server:debug,2014-08-19T15:37:52.518,ns_1@127.0.0.1:dist_manager<0.264.0>:dist_manager:bringup:238]Attempted to save node name to disk: ok [error_logger:info,2014-08-19T15:37:52.518,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.264.0>}, {name,dist_manager}, {mfargs,{dist_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.520,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.273.0>}, {name,ns_cookie_manager}, {mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[error_logger:info,2014-08-19T15:37:52.523,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.274.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:52.524,ns_1@127.0.0.1:ns_config_sup<0.275.0>:ns_config_sup:init:32]loading static ns_config from "/opt/couchbase/etc/couchbase/config" [error_logger:info,2014-08-19T15:37:52.524,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.276.0>}, {name,ns_config_events}, {mfargs, {gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.524,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.277.0>}, {name,ns_config_events_local}, {mfargs, {gen_event,start_link, [{local,ns_config_events_local}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:52.538,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:load_config:795]Loading static config from "/opt/couchbase/etc/couchbase/config" [ns_server:info,2014-08-19T15:37:52.538,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:load_config:809]Loading dynamic config from "/opt/couchbase/var/lib/couchbase/config/config.dat" [ns_server:info,2014-08-19T15:37:52.538,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:load_config:813]No dynamic config file found. 
Assuming we're brand new node [ns_server:debug,2014-08-19T15:37:52.541,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:load_config:816]Here's full dynamic config we loaded: [] [ns_server:info,2014-08-19T15:37:52.541,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:load_config:827]Here's full dynamic config we loaded + static & default config: [{replication_topology,star}, {drop_request_memory_threshold_mib,undefined}, {{request_limit,capi},undefined}, {{request_limit,rest},undefined}, {auto_failover_cfg,[{enabled,false},{timeout,120},{max_nodes,1},{count,0}]}, {replication,[{enabled,true}]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {buckets,[{configs,[]}]}, {memory_quota,58026}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',isasl}, 
[{'_vclock',[{<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {remote_clusters,[]}, {rest_creds,[{creds,[]}]}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',ssl_rest_port},18091}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {rest,[{port,8091}]}, {{node,'ns_1@127.0.0.1',membership},active}, {nodes_wanted,['ns_1@127.0.0.1']}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}] [ns_server:info,2014-08-19T15:37:52.542,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config_default:upgrade_config_from_1_7_to_1_7_1:342]Upgrading config from 1.7 to 1.7.1 [ns_server:debug,2014-08-19T15:37:52.543,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,7,1}}, {set,email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {set,auto_failover_cfg, [{enabled,false},{timeout,120},{max_nodes,1},{count,0}]}] [ns_server:info,2014-08-19T15:37:52.543,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config_default:upgrade_config_from_1_7_1_to_1_7_2:353]Upgrading config from 1.7.1 to 1.7.2 [ns_server:debug,2014-08-19T15:37:52.544,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,7,2}}] [ns_server:info,2014-08-19T15:37:52.544,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config_default:upgrade_config_from_1_7_2_to_1_8_0:407]Upgrading config from 1.7.2 to 1.8.0 [ns_server:debug,2014-08-19T15:37:52.546,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,8,0}}, {set,{node,'ns_1@127.0.0.1',port_servers}, [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR", {"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so", "-X", 
{"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}]}] [ns_server:info,2014-08-19T15:37:52.546,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config_default:upgrade_config_from_1_8_0_to_1_8_1:444]Upgrading config from 1.8.0 to 1.8.1 [ns_server:debug,2014-08-19T15:37:52.547,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,8,1}}, {set, {node,'ns_1@127.0.0.1',memcached}, [{dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {set, {node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{<<"6ac3587b37a4af2a2855762c74f815ea">>,{1,63575667472}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {set, {node,'ns_1@127.0.0.1',port_servers}, [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {set, {node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{<<"6ac3587b37a4af2a2855762c74f815ea">>,{1,63575667472}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}] [ns_server:info,2014-08-19T15:37:52.548,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config_default:upgrade_config_from_1_8_1_to_2_0:473]Upgrading 
config from 1.8.1 to 2.0 [ns_server:debug,2014-08-19T15:37:52.549,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{2,0}}, {set, {node,'ns_1@127.0.0.1',memcached}, [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}, {set, {node,'ns_1@127.0.0.1',port_servers}, [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}] [ns_server:info,2014-08-19T15:37:52.550,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config_default:upgrade_config_from_2_0_to_2_2_0:542]Upgrading config from 2.0 to 2.2.0 [ns_server:debug,2014-08-19T15:37:52.551,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{2,2,0}}] [ns_server:info,2014-08-19T15:37:52.551,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config_default:upgrade_config_from_2_2_0_to_2_3_0:549]Upgrading config from 2.2.0 to 2.3.0 [ns_server:debug,2014-08-19T15:37:52.552,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{2,3,0}}, {set, {node,'ns_1@127.0.0.1',memcached}, [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, 
{log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}] [ns_server:debug,2014-08-19T15:37:52.553,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:do_init:626]Upgraded initial config: {config, {full,"/opt/couchbase/etc/couchbase/config",undefined,ns_config_default}, [[], [{directory,"/opt/couchbase/var/lib/couchbase/config"}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {nodes_wanted,['ns_1@127.0.0.1']}, {{node,'ns_1@127.0.0.1',membership},active}, {rest,[{port,8091}]}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',ssl_rest_port},18091}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {rest_creds,[{creds,[]}]}, {remote_clusters,[]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {memory_quota,58026}, {buckets,[{configs,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", 
{"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {replication,[{enabled,true}]}, {auto_failover_cfg, [{enabled,false},{timeout,120},{max_nodes,1},{count,0}]}, {{request_limit,rest},undefined}, {{request_limit,capi},undefined}, {drop_request_memory_threshold_mib,undefined}, {replication_topology,star}]], [[{{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{7,63575667472}}]}|{2,3,0}]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667472}}]}, {enabled,false}, {timeout,120}, {max_nodes,1}, {count,0}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {buckets,[{configs,[]}]}, {drop_request_memory_threshold_mib,undefined}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667472}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {memory_quota,58026}, {nodes_wanted,['ns_1@127.0.0.1']}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {replication_topology,star}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{request_limit,capi},undefined}, {{request_limit,rest},undefined}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock', [{'ns_1@127.0.0.1',{3,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, 
[{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{3,63575667472}}]}, {moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {{node,'ns_1@127.0.0.1',ssl_rest_port},18091}]], ns_config_default, {ns_config,save_config_sync,[]}, undefined,false} [error_logger:info,2014-08-19T15:37:52.555,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.278.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/opt/couchbase/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.556,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.281.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.557,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS 
REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.282.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.586,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.284.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.586,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.275.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:52.587,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.286.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.592,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.288.0>}, {name,diag_handler_worker}, {mfa,{work_queue,start_link,[diag_handler_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:52.593,ns_1@127.0.0.1:ns_server_sup<0.287.0>:dir_size:start_link:47]Starting quick version of dir_size with program name: i386-linux-godu [error_logger:info,2014-08-19T15:37:52.594,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.289.0>}, {name,dir_size}, {mfa,{dir_size,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.595,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.290.0>}, {name,request_throttler}, {mfa,{request_throttler,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.597,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.292.0>}, {name,timer2_server}, {mfargs,{timer2,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:warn,2014-08-19T15:37:52.598,ns_1@127.0.0.1:ns_log<0.291.0>:ns_log:read_logs:123]Couldn't load logs from "/opt/couchbase/var/lib/couchbase/ns_log" (perhaps it's first startup): {error, enoent} [error_logger:info,2014-08-19T15:37:52.598,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.291.0>}, 
{name,ns_log}, {mfa,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.598,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.293.0>}, {name,ns_crash_log_consumer}, {mfa,{ns_log,start_link_crash_consumer,[]}}, {restart_type,{permanent,4}}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:52.599,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.294.0>}, {name,ns_config_ets_dup}, {mfa,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:52.599,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T15:37:52.600,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [ns_server:debug,2014-08-19T15:37:52.600,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T15:37:52.601,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T15:37:52.601,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[]}] [ns_server:debug,2014-08-19T15:37:52.602,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [ns_server:debug,2014-08-19T15:37:52.602,ns_1@127.0.0.1:ns_config_isasl_sync<0.297.0>:ns_config_isasl_sync:init:63]isasl_sync init: ["/opt/couchbase/var/lib/couchbase/isasl.pw","_admin", "f6126ae5fac44bf3d8316165791747f2"] [ns_server:debug,2014-08-19T15:37:52.602,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [ns_server:debug,2014-08-19T15:37:52.602,ns_1@127.0.0.1:ns_config_isasl_sync<0.297.0>:ns_config_isasl_sync:init:71]isasl_sync init buckets: [] [ns_server:debug,2014-08-19T15:37:52.602,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:debug,2014-08-19T15:37:52.602,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T15:37:52.603,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [ns_server:debug,2014-08-19T15:37:52.603,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: memory_quota -> 58026 
[ns_server:debug,2014-08-19T15:37:52.603,ns_1@127.0.0.1:ns_config_isasl_sync<0.297.0>:ns_config_isasl_sync:writeSASLConf:143]Writing isasl passwd file: "/opt/couchbase/var/lib/couchbase/isasl.pw" [ns_server:debug,2014-08-19T15:37:52.604,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [ns_server:debug,2014-08-19T15:37:52.604,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] [ns_server:debug,2014-08-19T15:37:52.604,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [ns_server:debug,2014-08-19T15:37:52.604,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: replication_topology -> star [ns_server:debug,2014-08-19T15:37:52.604,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [ns_server:info,2014-08-19T15:37:52.604,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [ns_server:debug,2014-08-19T15:37:52.604,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [ns_server:debug,2014-08-19T15:37:52.604,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T15:37:52.604,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2014-08-19T15:37:52.604,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T15:37:52.605,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined [ns_server:debug,2014-08-19T15:37:52.605,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:debug,2014-08-19T15:37:52.605,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T15:37:52.605,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T15:37:52.608,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',membership} -> active [ns_server:debug,2014-08-19T15:37:52.609,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] 
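Illustration (a sketch only; module and function names are hypothetical, not ns_server source): the port_servers and memcached entries above carry command-line arguments either as plain strings such as "-p" or as {FormatString, Keys} tuples such as {"~B",[port]} and {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}. A minimal Erlang sketch of expanding one such tuple against the node's memcached proplist shown above; it handles only plain atom keys, not the {misc,this_node_rest_port,[]} style entries that also appear in the moxi arguments.

-module(portarg_sketch).
-export([expand_arg/2]).

%% Expand a {FormatString, Keys} tuple by looking each key up in the
%% proplist and formatting the result; plain string arguments pass through.
expand_arg({Fmt, Keys}, Props) when is_list(Keys) ->
    Args = [proplists:get_value(K, Props) || K <- Keys],
    lists:flatten(io_lib:format(Fmt, Args));
expand_arg(Arg, _Props) when is_list(Arg) ->
    Arg.

%% Example: expand_arg({"~B",[port]}, [{port,11210},{dedicated_port,11209}])
%% returns "11210".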
[ns_server:debug,2014-08-19T15:37:52.609,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:warn,2014-08-19T15:37:52.609,ns_1@127.0.0.1:ns_config_isasl_sync<0.297.0>:ns_memcached:connect:1161]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. [ns_server:debug,2014-08-19T15:37:52.609,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T15:37:52.609,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T15:37:52.610,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T15:37:52.610,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T15:37:52.610,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T15:37:52.610,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T15:37:52.610,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_rest_port} -> 18091 [error_logger:info,2014-08-19T15:37:53.610,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.297.0>}, {name,ns_config_isasl_sync}, {mfa,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:53.610,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] 
=========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.301.0>}, {name,ns_log_events}, {mfa,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:53.611,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.303.0>}, {name,ns_node_disco_events}, {mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:53.612,ns_1@127.0.0.1:ns_node_disco<0.304.0>:ns_node_disco:init:103]Initting ns_node_disco with [] [ns_server:debug,2014-08-19T15:37:53.612,ns_1@127.0.0.1:ns_cookie_manager<0.273.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [user:info,2014-08-19T15:37:53.612,ns_1@127.0.0.1:ns_cookie_manager<0.273.0>:ns_cookie_manager:do_cookie_init:86]Initial otp cookie generated: alkbqedpsntmtnxa [ns_server:debug,2014-08-19T15:37:53.612,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,alkbqedpsntmtnxa}] [ns_server:debug,2014-08-19T15:37:53.612,ns_1@127.0.0.1:ns_cookie_manager<0.273.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T15:37:53.644,ns_1@127.0.0.1:ns_cookie_manager<0.273.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T15:37:53.644,ns_1@127.0.0.1:<0.305.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [ns_server:debug,2014-08-19T15:37:53.645,ns_1@127.0.0.1:<0.305.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [error_logger:info,2014-08-19T15:37:53.645,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.304.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:53.646,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.308.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:53.647,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.309.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:53.648,ns_1@127.0.0.1:ns_config_rep<0.311.0>:ns_config_rep:init:66]init pulling [error_logger:info,2014-08-19T15:37:53.648,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS 
REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.310.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:53.648,ns_1@127.0.0.1:ns_config_rep<0.311.0>:ns_config_rep:init:68]init pushing [ns_server:debug,2014-08-19T15:37:53.649,ns_1@127.0.0.1:ns_config_rep<0.311.0>:ns_config_rep:init:72]init reannouncing [ns_server:debug,2014-08-19T15:37:53.649,ns_1@127.0.0.1:ns_config_events<0.276.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [ns_server:debug,2014-08-19T15:37:53.649,ns_1@127.0.0.1:ns_cookie_manager<0.273.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T15:37:53.649,ns_1@127.0.0.1:ns_config_events<0.276.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [ns_server:debug,2014-08-19T15:37:53.649,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,alkbqedpsntmtnxa}] [ns_server:debug,2014-08-19T15:37:53.650,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T15:37:53.650,ns_1@127.0.0.1:ns_cookie_manager<0.273.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T15:37:53.650,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [ns_server:debug,2014-08-19T15:37:53.650,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T15:37:53.650,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T15:37:53.650,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[]}] [ns_server:debug,2014-08-19T15:37:53.650,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [error_logger:info,2014-08-19T15:37:53.651,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.311.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:53.651,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.302.0>}, {name,ns_node_disco_sup}, {mfa,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T15:37:53.651,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, 
{alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [ns_server:debug,2014-08-19T15:37:53.651,ns_1@127.0.0.1:ns_config_rep<0.311.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([alert_limits,auto_failover_cfg,autocompaction, buckets,drop_request_memory_threshold_mib, email_alerts,fast_warmup, index_aware_rebalance_disabled, max_bucket_count,memory_quota,nodes_wanted,otp, remote_clusters,replication, replication_topology,rest,rest_creds, set_view_update_daemon, {couchdb,max_parallel_indexers}, {couchdb,max_parallel_replica_indexers}, {request_limit,capi}, {request_limit,rest}, {node,'ns_1@127.0.0.1',capi_port}, {node,'ns_1@127.0.0.1',compaction_daemon}, {node,'ns_1@127.0.0.1',config_version}, {node,'ns_1@127.0.0.1',isasl}, {node,'ns_1@127.0.0.1',membership}, {node,'ns_1@127.0.0.1',memcached}, {node,'ns_1@127.0.0.1',moxi}, {node,'ns_1@127.0.0.1',ns_log}, {node,'ns_1@127.0.0.1',port_servers}, {node,'ns_1@127.0.0.1',rest}, {node,'ns_1@127.0.0.1',ssl_capi_port}, {node,'ns_1@127.0.0.1', ssl_proxy_downstream_port}, {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port}, {node,'ns_1@127.0.0.1',ssl_rest_port}]..) [ns_server:debug,2014-08-19T15:37:53.651,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:debug,2014-08-19T15:37:53.651,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T15:37:53.651,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [ns_server:debug,2014-08-19T15:37:53.651,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: memory_quota -> 58026 [ns_server:debug,2014-08-19T15:37:53.651,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [ns_server:debug,2014-08-19T15:37:53.652,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] [ns_server:debug,2014-08-19T15:37:53.652,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [error_logger:info,2014-08-19T15:37:53.652,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.317.0>}, {name,vbucket_map_mirror}, {mfa,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:53.653,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: replication_topology -> star [ns_server:debug,2014-08-19T15:37:53.653,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [ns_server:info,2014-08-19T15:37:53.653,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [ns_server:debug,2014-08-19T15:37:53.653,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] 
[ns_server:debug,2014-08-19T15:37:53.653,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T15:37:53.653,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2014-08-19T15:37:53.653,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T15:37:53.653,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined [ns_server:debug,2014-08-19T15:37:53.653,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:debug,2014-08-19T15:37:53.653,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T15:37:53.654,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T15:37:53.654,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',membership} -> active [ns_server:debug,2014-08-19T15:37:53.654,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T15:37:53.654,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T15:37:53.654,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T15:37:53.655,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", 
["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T15:37:53.655,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T15:37:53.655,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T15:37:53.655,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T15:37:53.655,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T15:37:53.655,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T15:37:53.837,ns_1@127.0.0.1:ns_cookie_manager<0.273.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [error_logger:info,2014-08-19T15:37:53.837,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.319.0>}, {name,bucket_info_cache}, {mfa,{bucket_info_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:53.837,ns_1@127.0.0.1:ns_cookie_manager<0.273.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [error_logger:info,2014-08-19T15:37:53.837,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.322.0>}, {name,ns_tick_event}, {mfa,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:53.837,ns_1@127.0.0.1:<0.314.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [ns_server:debug,2014-08-19T15:37:53.837,ns_1@127.0.0.1:ns_cookie_manager<0.273.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T15:37:53.837,ns_1@127.0.0.1:<0.314.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [error_logger:info,2014-08-19T15:37:53.837,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.323.0>}, {name,buckets_events}, 
{mfa,{gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:53.839,ns_1@127.0.0.1:ns_log_events<0.301.0>:ns_mail_log:init:44]ns_mail_log started up [error_logger:info,2014-08-19T15:37:53.839,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.325.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:53.840,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.324.0>}, {name,ns_mail_sup}, {mfa,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:53.840,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.326.0>}, {name,ns_stats_event}, {mfa,{gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:54.683,ns_1@127.0.0.1:ns_cookie_manager<0.273.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T15:37:54.684,ns_1@127.0.0.1:<0.315.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [error_logger:info,2014-08-19T15:37:54.684,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.327.0>}, {name,samples_loader_tasks}, {mfa,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:54.684,ns_1@127.0.0.1:<0.315.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [error_logger:info,2014-08-19T15:37:54.686,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.328.0>}, {name,ns_heart}, {mfa,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:54.687,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.330.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [error_logger:info,2014-08-19T15:37:54.690,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.332.0>}, {name,ns_doctor}, {mfa,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:54.696,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: 
{local,ns_server_sup} started: [{pid,<0.335.0>}, {name,remote_clusters_info}, {mfa,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:54.697,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.336.0>}, {name,master_activity_events}, {mfa, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:54.697,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.330.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}, {proc_lib,init_p_do_apply,3}]} [ns_server:debug,2014-08-19T15:37:54.699,ns_1@127.0.0.1:ns_server_sup<0.287.0>:mb_master:check_master_takeover_needed:141]Sending master node question to the following nodes: [] [ns_server:debug,2014-08-19T15:37:54.699,ns_1@127.0.0.1:ns_server_sup<0.287.0>:mb_master:check_master_takeover_needed:143]Got replies: [] [ns_server:debug,2014-08-19T15:37:54.700,ns_1@127.0.0.1:ns_server_sup<0.287.0>:mb_master:check_master_takeover_needed:149]Was unable to discover master, not going to force mastership takeover [user:info,2014-08-19T15:37:54.702,ns_1@127.0.0.1:mb_master<0.339.0>:mb_master:init:86]I'm the only node, so I'm the master. [ns_server:debug,2014-08-19T15:37:54.708,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.330.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [ns_server:debug,2014-08-19T15:37:54.709,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.330.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}]} [ns_server:debug,2014-08-19T15:37:54.714,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> undefined [ns_server:info,2014-08-19T15:37:54.714,ns_1@127.0.0.1:ns_config<0.278.0>:ns_online_config_upgrader:upgrade_config_on_join_from_pre_2_0_to_2_0:70]Adding some 2.0 specific keys to the config [ns_server:debug,2014-08-19T15:37:54.714,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,dynamic_config_version,[2,0]},{set,vbucket_map_history,[]}] [ns_server:debug,2014-08-19T15:37:54.715,ns_1@127.0.0.1:ns_config_rep<0.311.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([dynamic_config_version]..) [ns_server:debug,2014-08-19T15:37:54.717,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> [2,5] [ns_server:debug,2014-08-19T15:37:54.717,ns_1@127.0.0.1:ns_config_rep<0.311.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([cluster_compat_version]..) 
[ns_server:debug,2014-08-19T15:37:54.717,ns_1@127.0.0.1:ns_config_rep<0.311.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@127.0.0.1' [ns_server:debug,2014-08-19T15:37:54.717,ns_1@127.0.0.1:ns_config_rep<0.311.0>:ns_config_rep:handle_call:119]Fully synchronized config in 10 us [user:warn,2014-08-19T15:37:54.717,ns_1@127.0.0.1:<0.346.0>:ns_orchestrator:consider_switching_compat_mode:1051]Changed cluster compat mode from undefined to [2,5] [ns_server:info,2014-08-19T15:37:54.717,ns_1@127.0.0.1:ns_config<0.278.0>:ns_online_config_upgrader:upgrade_config_from_pre_2_0_to_2_0:74]Performing online config upgrade to 2.0 version [ns_server:debug,2014-08-19T15:37:54.718,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> undefined [ns_server:debug,2014-08-19T15:37:54.718,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,dynamic_config_version,[2,0]}] [ns_server:info,2014-08-19T15:37:54.719,ns_1@127.0.0.1:ns_config<0.278.0>:ns_online_config_upgrader:upgrade_config_from_2_0_to_2_5:78]Performing online config upgrade to 2.5 version [ns_server:debug,2014-08-19T15:37:54.719,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,dynamic_config_version,[2,5]}, {set,server_groups, [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]]}] [ns_server:debug,2014-08-19T15:37:54.719,ns_1@127.0.0.1:ns_config_rep<0.311.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([dynamic_config_version]..) [ns_server:debug,2014-08-19T15:37:54.719,ns_1@127.0.0.1:mb_master_sup<0.341.0>:misc:start_singleton:986]start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.346.0> on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:37:54.719,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.346.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:54.721,ns_1@127.0.0.1:mb_master_sup<0.341.0>:misc:start_singleton:986]start_singleton(gen_server, ns_tick, [], []): started as <0.354.0> on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:37:54.721,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.354.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:54.743,ns_1@127.0.0.1:<0.355.0>:auto_failover:init:134]init auto_failover. 
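Illustration (a sketch only, not the actual ns_config:do_upgrade_config logic): the "Upgrading config by changes:" entries above apply lists of {set,Key,Value} operations such as {set,dynamic_config_version,[2,5]} and {set,server_groups,[...]}. Folding such a change list into a flat key/value list can be sketched as below; the real code also maintains the per-key '_vclock' metadata seen elsewhere in this log, which the sketch ignores.

%% Apply each {set,Key,Value} in order, replacing an existing entry for
%% Key or appending a new one.
apply_changes(Changes, Config) ->
    lists:foldl(
      fun({set, Key, Value}, Acc) ->
              lists:keystore(Key, 1, Acc, {Key, Value})
      end,
      Config,
      Changes).

%% Example:
%% apply_changes([{set,dynamic_config_version,[2,5]}],
%%               [{dynamic_config_version,[2,0]}])
%% returns [{dynamic_config_version,[2,5]}].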
[ns_server:debug,2014-08-19T15:37:54.743,ns_1@127.0.0.1:mb_master_sup<0.341.0>:misc:start_singleton:986]start_singleton(gen_server, auto_failover, [], []): started as <0.355.0> on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:37:54.743,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.355.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:54.743,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.339.0>}, {name,mb_master}, {mfa,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:54.744,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.357.0>}, {name,master_activity_events_ingress}, {mfa, {gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:54.744,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.358.0>}, {name,master_activity_events_timestamper}, {mfa, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:54.744,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.359.0>}, {name,master_activity_events_pids_watcher}, {mfa, {master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:54.781,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.360.0>}, {name,master_activity_events_keeper}, {mfa,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:58.407,ns_1@127.0.0.1:ns_ssl_services_setup<0.364.0>:ns_server_cert:generate_cert_and_pkey:44]Generated certificate and private key in 3621484 us [ns_server:debug,2014-08-19T15:37:58.408,ns_1@127.0.0.1:ns_config_rep<0.311.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([cert_and_pkey]..) 
[ns_server:debug,2014-08-19T15:37:58.408,ns_1@127.0.0.1:ns_config_log<0.282.0>:ns_config_log:log_common:138]config change: cert_and_pkey -> {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQPzPIoEQwCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmwlh6UM1HlSt78Xr7YCe\n18VU0sN62xbybSOxadjU2gF03Q2jgd+n84Tr9iGKtuy7DUKk/eJJQDQWcCDGTxYg\n8QNmzAlnX/eufV4rhr/9nlksMKdIlXWDvOdLX4yO1FIZ/QvGtoFWBwEc832n3sfa\n1f+EzMV8X6nZxMPV/Stc0StxJPY2Akqi99je3Qs"...>>, <<"*****">>} [error_logger:info,2014-08-19T15:37:58.434,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.364.0>}, {name,ns_ssl_services_setup}, {mfargs,{ns_ssl_services_setup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:58.436,ns_1@127.0.0.1:ns_ssl_services_setup<0.364.0>:ns_ssl_services_setup:restart_xdcr_proxy:201]Xdcr proxy restart failed. But that's usually normal. {'EXIT', {{badmatch, {badrpc, {'EXIT', {{case_clause, false}, [{ns_child_ports_sup, restart_port_by_name, 1}, {rpc, '-handle_call_call/6-fun-0-', 5}]}}}}, [{ns_ports_setup, restart_xdcr_proxy, 0}, {ns_ssl_services_setup, restart_xdcr_proxy, 0}, {ns_ssl_services_setup, init,1}, {gen_server,init_it, 6}, {proc_lib, init_p_do_apply, 3}]}} [error_logger:info,2014-08-19T15:37:58.461,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.372.0>}, {name,ns_rest_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_rest_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.463,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.389.0>}, {name,ns_capi_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_capi_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.463,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.363.0>}, {name,ns_ssl_services_sup}, {mfargs,{ns_ssl_services_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:58.465,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.406.0>}, {name,menelaus_ui_auth}, {mfargs,{menelaus_ui_auth,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.466,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.407.0>}, {name,menelaus_web_cache}, {mfargs,{menelaus_web_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] 
[error_logger:info,2014-08-19T15:37:58.467,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.408.0>}, {name,menelaus_stats_gatherer}, {mfargs,{menelaus_stats_gatherer,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.468,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.409.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.469,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.426.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.470,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.427.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.473,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.428.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [user:info,2014-08-19T15:37:58.473,ns_1@127.0.0.1:ns_server_sup<0.287.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. 
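Illustration (a hypothetical probe, not part of the startup sequence logged here): once menelaus reports the web server on port 8091, the node answers plain HTTP on that port. A minimal check from an Erlang shell, assuming the inets application is started in that shell; /pools is the standard Couchbase REST entry point and may require credentials on a cluster that has administrator credentials configured.

%% Run in an Erlang shell on the same host; crashes on a non-ok result,
%% which is acceptable for a one-off probe.
inets:start(),
{ok, {StatusLine, _Headers, _Body}} =
    httpc:request("http://127.0.0.1:8091/pools"),
io:format("REST probe returned: ~p~n", [StatusLine]).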
[error_logger:info,2014-08-19T15:37:58.473,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.362.0>}, {name,menelaus}, {mfa,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:58.474,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.430.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.475,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.431.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}] [ns_server:info,2014-08-19T15:37:58.476,ns_1@127.0.0.1:<0.432.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213 [error_logger:info,2014-08-19T15:37:58.476,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.432.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.476,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.429.0>}, {name,mc_sup}, {mfa,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:58.476,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.433.0>}, {name,ns_ports_setup}, {mfa,{ns_ports_setup,start,[]}}, {restart_type,{permanent,4}}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.476,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.434.0>}, {name,ns_port_memcached_killer}, {mfa,{ns_ports_setup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:58.477,ns_1@127.0.0.1:<0.436.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/opt/couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms [error_logger:info,2014-08-19T15:37:58.477,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.436.0>}, {name,ns_memcached_log_rotator}, {mfa,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.481,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS 
REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.438.0>}, {name,memcached_clients_pool}, {mfa,{memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.482,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.439.0>}, {name,proxied_memcached_clients_pool}, {mfa,{proxied_memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.483,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.440.0>}, {name,xdc_lhttpc_pool}, {mfa, {lhttpc_manager,start_link, [[{name,xdc_lhttpc_pool}, {connection_timeout,120000}, {pool_size,200}]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.483,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.441.0>}, {name,ns_null_connection_pool}, {mfa, {ns_null_connection_pool,start_link, [ns_null_connection_pool]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.483,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.442.0>}, {name,xdc_replication_sup}, {mfa,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:58.510,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.443.0>}, {name,xdc_rep_manager}, {mfa,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.511,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.452.0>}, {name,ns_memcached_sockets_pool}, {mfa,{ns_memcached_sockets_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.516,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.455.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.517,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.457.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[error_logger:info,2014-08-19T15:37:58.517,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.456.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:58.517,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.454.0>}, {name,ns_bucket_worker_sup}, {mfa,{ns_bucket_worker_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:58.518,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.458.0>}, {name,system_stats_collector}, {mfa,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.519,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.461.0>}, {name,{stats_archiver,"@system"}}, {mfa,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.519,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.463.0>}, {name,{stats_reader,"@system"}}, {mfa,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:58.522,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [error_logger:info,2014-08-19T15:37:58.522,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.464.0>}, {name,compaction_daemon}, {mfa,{compaction_daemon,start_link,[]}}, {restart_type,{permanent,4}}, {shutdown,86400000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:58.523,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T15:37:58.524,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.466.0>:xdc_rdoc_replication_srv:init:76]Loaded the following docs: [] [ns_server:debug,2014-08-19T15:37:58.524,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.466.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [error_logger:info,2014-08-19T15:37:58.524,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.466.0>}, {name,xdc_rdoc_replication_srv}, {mfa,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:58.525,ns_1@127.0.0.1:set_view_update_daemon<0.468.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000 [ns_server:debug,2014-08-19T15:37:58.525,ns_1@127.0.0.1:<0.2.0>:child_erlang:child_loop:104]Entered child_loop [error_logger:info,2014-08-19T15:37:58.525,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.468.0>}, {name,set_view_update_daemon}, {mfa,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:58.525,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.287.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:58.526,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: ns_server started_at: 'ns_1@127.0.0.1' [ns_server:debug,2014-08-19T15:37:59.687,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.330.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {error,no_samples} [ns_server:debug,2014-08-19T15:38:28.524,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:38:28.524,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:38:58.525,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:38:58.525,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:39:28.526,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:39:28.526,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:39:58.527,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. 
[ns_server:debug,2014-08-19T15:39:58.527,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:40:28.528,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:40:28.528,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:40:58.529,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:40:58.529,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:41:28.530,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:41:28.530,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:41:58.531,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:41:58.531,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:42:28.532,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:42:28.532,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:42:58.533,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:42:58.533,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:43:28.534,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:43:28.534,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:43:58.535,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:43:58.535,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:44:28.536,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:44:28.536,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:44:58.537,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. 
[ns_server:debug,2014-08-19T15:44:58.537,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:45:28.538,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:45:28.538,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:45:58.539,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:45:58.540,ns_1@127.0.0.1:compaction_daemon<0.464.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [user:info,2014-08-19T15:46:25.248,ns_1@127.0.0.1:<0.293.0>:ns_log:crash_consumption_loop:64]Port server memcached on node 'babysitter_of_ns_1@127.0.0.1' exited with status 0. Restarting. Messages: EOL on stdin. Initiating shutdown [user:info,2014-08-19T15:46:25.249,ns_1@127.0.0.1:<0.293.0>:ns_log:crash_consumption_loop:64]Port server moxi on node 'babysitter_of_ns_1@127.0.0.1' exited with status 0. Restarting. Messages: EOL on stdin. Exiting [ns_server:debug,2014-08-19T15:46:25.249,ns_1@127.0.0.1:<0.435.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.433.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:46:25.249,ns_1@127.0.0.1:<0.2.0>:child_erlang:child_loop:108]Got EOL [error_logger:info,2014-08-19T15:46:25.249,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.1886.0>}, {name,ns_ports_setup}, {mfa,{ns_ports_setup,start,[]}}, {restart_type,{permanent,4}}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T15:46:25.250,ns_1@127.0.0.1:<0.2.0>:ns_bootstrap:stop:41]Initiated server shutdown [error_logger:info,2014-08-19T15:46:25.250,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:119]Initiated server shutdown [ns_server:debug,2014-08-19T15:46:25.250,ns_1@127.0.0.1:<0.469.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.468.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:46:25.250,ns_1@127.0.0.1:<0.465.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.464.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:46:25.463,ns_1@127.0.0.1:<0.462.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_stats_event,<0.461.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:46:25.464,ns_1@127.0.0.1:<0.460.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_tick_event,<0.458.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:46:25.464,ns_1@127.0.0.1:<0.457.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.456.0>} exited with reason shutdown [error_logger:error,2014-08-19T15:46:25.464,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_bucket_sup} Context: shutdown_error Reason: normal Offender: [{pid,<0.457.0>}, {name,buckets_observing_subscription}, 
{mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:25.464,ns_1@127.0.0.1:<0.1887.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.1886.0>} exited with reason killed [ns_server:debug,2014-08-19T15:46:25.464,ns_1@127.0.0.1:<0.437.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.434.0>} exited with reason killed [ns_server:debug,2014-08-19T15:46:25.465,ns_1@127.0.0.1:<0.365.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.364.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:46:25.465,ns_1@127.0.0.1:<0.361.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {master_activity_events,<0.360.0>} exited with reason killed [ns_server:info,2014-08-19T15:46:25.465,ns_1@127.0.0.1:mb_master<0.339.0>:mb_master:terminate:299]Synchronously shutting down child mb_master_sup [ns_server:debug,2014-08-19T15:46:25.465,ns_1@127.0.0.1:<0.340.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.339.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:46:25.465,ns_1@127.0.0.1:<0.333.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.332.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:46:25.465,ns_1@127.0.0.1:<0.329.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {buckets_events,<0.328.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:46:25.465,ns_1@127.0.0.1:<0.321.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.319.0>} exited with reason killed [ns_server:debug,2014-08-19T15:46:25.465,ns_1@127.0.0.1:<0.312.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events_local,<0.311.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:46:25.466,ns_1@127.0.0.1:<0.318.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.317.0>} exited with reason killed [ns_server:debug,2014-08-19T15:46:25.466,ns_1@127.0.0.1:<0.298.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.297.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:46:25.466,ns_1@127.0.0.1:<0.295.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.294.0>} exited with reason killed [error_logger:error,2014-08-19T15:46:25.467,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: gen_event:init_it/6 pid: <0.320.0> registered_name: bucket_info_cache_invalidations exception exit: killed in function gen_event:terminate_server/4 ancestors: [bucket_info_cache,ns_server_sup,ns_server_cluster_sup, <0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 233 stack_size: 24 reductions: 119 neighbours: [error_logger:error,2014-08-19T15:46:25.568,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_server_cluster_sup} Context: shutdown_error Reason: killed Offender: [{pid,<0.286.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] 
[ns_server:debug,2014-08-19T15:46:25.568,ns_1@127.0.0.1:<0.285.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.284.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:46:25.568,ns_1@127.0.0.1:<0.283.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.282.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:46:25.568,ns_1@127.0.0.1:ns_config<0.278.0>:ns_config:wait_saver:652]Done waiting for saver. [error_logger:error,2014-08-19T15:46:25.570,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: couch_file:spawn_writer/2 pid: <0.225.0> registered_name: [] exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.225.0>}, infinity]}} in function gen_server:call/3 in call from couch_file:writer_loop/4 ancestors: [<0.222.0>,couch_server,couch_primary_services, couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 233 stack_size: 24 reductions: 2139 neighbours: [error_logger:error,2014-08-19T15:46:25.571,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: couch_file:spawn_writer/2 pid: <0.447.0> registered_name: [] exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.447.0>}, infinity]}} in function gen_server:call/3 in call from couch_file:writer_loop/4 ancestors: [<0.444.0>,couch_server,couch_primary_services, couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 377 stack_size: 24 reductions: 783 neighbours: [error_logger:error,2014-08-19T15:46:25.571,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:119]** Generic server <0.226.0> terminating ** Last message in was {'EXIT',<0.211.0>,killed} ** When Server state == {db,<0.226.0>,<0.227.0>,nil,<<"1408448272284601">>, <0.222.0>,<0.228.0>, {db_header,11,1, <<0,0,0,0,13,103,0,0,0,0,0,51,0,0,0,0,1,0,0,0, 0,0,0,0,0,0,13,69>>, <<0,0,0,0,13,154,0,0,0,0,0,49,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.222.0>, {3431, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>, 51}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.222.0>, {3482,<<0,0,0,0,1>>,49}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.222.0>,nil,identity,identity, #Fun,nil,1279,2558, true}, 1,<<"_users">>, "/opt/couchbase/var/lib/couchbase/data/_users.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], [create, {user_ctx, {user_ctx,null,[<<"_admin">>],undefined}}, sys_db]} ** Reason for termination == ** killed [error_logger:error,2014-08-19T15:46:25.572,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.226.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 285 neighbours: [error_logger:error,2014-08-19T15:46:25.572,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:119]** Generic server <0.448.0> 
terminating ** Last message in was {'EXIT',<0.211.0>,killed} ** When Server state == {db,<0.448.0>,<0.449.0>,nil,<<"1408448278509999">>, <0.444.0>,<0.450.0>, {db_header,11,0,nil,nil,nil,0,nil,nil}, 0, {btree,<0.444.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.444.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.444.0>,nil,identity,identity, #Fun,nil,1279,2558, true}, 0,<<"_replicator">>, "/opt/couchbase/var/lib/couchbase/data/_replicator.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], [create,sys_db, {user_ctx, {user_ctx,null, [<<"_admin">>,<<"_replicator">>], undefined}}]} ** Reason for termination == ** killed [error_logger:error,2014-08-19T15:46:25.572,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.448.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 610 stack_size: 24 reductions: 249 neighbours: [error_logger:info,2014-08-19T15:46:25.578,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================INFO REPORT========================= application: ns_server exited: stopped type: permanent [ns_server:info,2014-08-19T15:46:30.362,nonode@nohost:<0.58.0>:ns_server:init_logging:248]Started & configured logging [ns_server:info,2014-08-19T15:46:30.365,nonode@nohost:<0.58.0>:ns_server:log_pending:30]Static config terms: [{error_logger_mf_dir,"/opt/couchbase/var/lib/couchbase/logs"}, {error_logger_mf_maxbytes,10485760}, {error_logger_mf_maxfiles,20}, {path_config_bindir,"/opt/couchbase/bin"}, {path_config_etcdir,"/opt/couchbase/etc/couchbase"}, {path_config_libdir,"/opt/couchbase/lib"}, {path_config_datadir,"/opt/couchbase/var/lib/couchbase"}, {path_config_tmpdir,"/opt/couchbase/var/lib/couchbase/tmp"}, {nodefile,"/opt/couchbase/var/lib/couchbase/couchbase-server.node"}, {loglevel_default,debug}, {loglevel_couchdb,info}, {loglevel_ns_server,debug}, {loglevel_error_logger,debug}, {loglevel_user,debug}, {loglevel_menelaus,debug}, {loglevel_ns_doctor,debug}, {loglevel_stats,debug}, {loglevel_rebalance,debug}, {loglevel_cluster,debug}, {loglevel_views,debug}, {loglevel_mapreduce_errors,debug}, {loglevel_xdcr,debug}] [ns_server:info,2014-08-19T15:46:30.515,nonode@nohost:<0.58.0>:ns_server:start:58]Locked myself into a memory successfully. 
[error_logger:info,2014-08-19T15:46:30.551,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,crypto_sup} started: [{pid,<0.166.0>}, {name,crypto_server}, {mfargs,{crypto_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.552,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: crypto started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.560,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: asn1 started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.564,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: public_key started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.570,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.173.0>}, {name,ftp_sup}, {mfargs,{ftp_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.589,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,httpc_profile_sup} started: [{pid,<0.176.0>}, {name,httpc_manager}, {mfargs, {httpc_manager,start_link, [default,only_session_cookies,inets]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.590,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,httpc_sup} started: [{pid,<0.175.0>}, {name,httpc_profile_sup}, {mfargs, {httpc_profile_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.592,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,httpc_sup} started: [{pid,<0.177.0>}, {name,httpc_handler_sup}, {mfargs,{httpc_handler_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.592,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.174.0>}, {name,httpc_sup}, {mfargs, {httpc_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.595,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.178.0>}, {name,httpd_sup}, {mfargs,{httpd_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.598,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] 
=========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.179.0>}, {name,tftp_sup}, {mfargs,{tftp_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.598,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: inets started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.598,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: oauth started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.608,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.185.0>}, {name,ssl_broker_sup}, {mfargs,{ssl_broker_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.615,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.186.0>}, {name,ssl_manager}, {mfargs,{ssl_manager,start_link,[[]]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.617,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.187.0>}, {name,ssl_connection}, {mfargs,{ssl_connection_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.617,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: ssl started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.770,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.194.0>}, {name,ssl_server}, {mfargs,{ssl_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.770,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,lhttpc_sup} started: [{pid,<0.192.0>}, {name,lhttpc_manager}, {mfargs, {lhttpc_manager,start_link, [[{name,lhttpc_manager}]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.770,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: lhttpc started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.775,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: xmerl started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.790,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: compiler started_at: nonode@nohost 
[error_logger:info,2014-08-19T15:46:30.796,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: syntax_tools started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.796,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: mochiweb started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.800,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_view_parser started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.804,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_set_view started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.808,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_index_merger started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.811,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: mapreduce started_at: nonode@nohost [error_logger:info,2014-08-19T15:46:30.848,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.203.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/default.d/capi.ini", "/opt/couchbase/etc/couchdb/default.d/geocouch.ini", "/opt/couchbase/etc/couchdb/local.ini"], <0.203.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.871,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.206.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.872,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.207.0>}, {name,couch_task_events}, {mfargs, {gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.873,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.208.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.875,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.209.0>}, {name,couch_file_write_guard}, 
{mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.897,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.210.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.897,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.223.0>}, {name,couch_db_update_event}, {mfargs, {gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.898,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.224.0>}, {name,couch_replication_event}, {mfargs, {gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.898,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.225.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.900,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.226.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.903,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.227.0>}, {name,couch_main_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_main_index_barrier, "max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.904,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.228.0>}, {name,couch_replica_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_replica_index_barrier, "max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.904,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.229.0>}, {name,couch_spatial_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_spatial_index_barrier, "max_parallel_spatial_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
[error_logger:info,2014-08-19T15:46:30.904,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.205.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.907,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.231.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.914,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.232.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.923,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.235.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.925,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.238.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.925,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.240.0>}, {name,index_merger_pool}, {mfargs, {lhttpc_manager,start_link, [[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.928,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.241.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.930,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.243.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.934,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.245.0>}, 
{name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.947,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.247.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.948,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.264.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:30.948,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.230.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.949,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,cb_couch_sup} started: [{pid,<0.204.0>}, {name,couch_app}, {mfargs, {couch_app,start, [fake, ["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:30.949,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.161.0>}, {name,cb_couch_sup}, {mfargs,{cb_couch_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,supervisor}] [ns_server:info,2014-08-19T15:46:30.956,nonode@nohost:ns_server_cluster_sup<0.160.0>:log_os_info:start_link:25]OS type: {unix,linux} Version: {2,6,32} Runtime info: [{otp_release,"R14B04"}, {erl_version,"5.8.5"}, {erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:24:24] [rq:24] [async-threads:16] [kernel-poll:true]\n"}, {system_arch_raw,"x86_64-unknown-linux-gnu"}, {system_arch,"x86_64-unknown-linux-gnu"}, {localtime,{{2014,8,19},{15,46,30}}}, {memory, [{total,560723616}, {processes,5526464}, {processes_used,5517808}, {system,555197152}, {atom,830761}, {atom_used,821985}, {binary,52944}, {code,7808288}, {ets,644944}]}, {loaded, [ns_info,log_os_info,couch_config_writer,cb_init_loggers, couch_uuids,mochiweb_acceptor,inet_tcp,gen_tcp, mochiweb_socket,mochiweb_socket_server,mochilists, mochiweb_http,eval_bits,couch_httpd,couch_view, couch_set_view_ddoc_cache,couch_query_servers, couch_spatial,mapreduce,couch_set_view, couch_db_update_notifier,snappy,couch_compress, couch_auth_cache,couch_db_update_notifier_sup, couch_secondary_sup,queue,couch_index_barrier, couch_event_sup,couch_log,couch_rep_sup,couch_btree, couch_ref_counter,couch_db_updater,couch_db,httpd_util, filelib,couch_file,couch_file_write_guard, couch_task_status,erl_ddll,couch_drv,couch_primary_sup, couch_server,string,re,file2,couch_util,couch_config, couch_server_sup,ssl_server,crypto,ssl,lhttpc_manager, 
lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, mlockall,calendar,ale_default_formatter,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views','ale_logger-cluster',timer, io_lib_fread,'ale_logger-rebalance','ale_logger-stats', 'ale_logger-ns_doctor','ale_logger-menelaus', 'ale_logger-user','ale_logger-ns_server', 'ale_logger-couchdb',ns_log_sink,disk_log_sup, disk_log_server,disk_log_1,disk_log,ale_disk_sink, ns_server,cpu_sup,memsup,disksup,os_mon,io, release_handler,overload,alarm_handler,log_mf_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal,sets, ordsets,erl_lint,compile,dynamic_compile,ale_utils, io_lib_pretty,io_lib_format,io_lib,ale_codegen,dict,ale, ale_dynamic_sup,ale_sup,ale_app,ns_bootstrap,child_erlang, file_io_server,orddict,erl_eval,file,c,kernel_config, user_sup,supervisor_bridge,standard_error,unicode,binary, ets,gb_sets,hipe_unified_loader,packages,code_server,code, file_server,net_kernel,global_group,erl_distribution, filename,inet_gethost_native,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{public_key,"Public key infrastructure","0.13"}, {asn1,"The Erlang ASN1 compiler version 1.6.18","1.6.18"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8ca6d2a"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set views","1.2.0a-a425d97-git"}, {compiler,"ERTS CXC 138 10","4.7.5"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache CouchDB","1.2.0a-a425d97-git"}, {mapreduce,"MapReduce using V8 JavaScript engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-a425d97-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.5.1-1083-rel-enterprise"}, {mochiweb,"MochiMedia Web Server","2.4.2"}, {syntax_tools,"Syntax tools","1.6.7.1"}, {xmerl,"XML parser","1.2.10"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,152}, {node,nonode@nohost}, {nodes,[]}, {registered, [ssl_sup,couch_file_write_guard,global_group, lhttpc_manager,tftp_sup,ale_sup,lhttpc_sup,httpc_sup, disk_log_sup,ale_dynamic_sup,disk_log_server, erl_prim_loader,httpc_profile_sup,os_mon_sup, httpc_manager,code_server,ns_server_cluster_sup, httpc_handler_sup,sasl_sup,'sink-ns_log',cpu_sup, 'sink-disk_stats',ftp_sup,couch_db_update_notifier_sup, 
memsup,application_controller,'sink-disk_xdcr_errors', disksup,ale,'sink-disk_xdcr',error_logger, standard_error_sup,standard_error,'sink-disk_debug', couch_log,'sink-disk_couchdb', 'sink-disk_mapreduce_errors',couch_auth_cache, 'sink-disk_views',inets_sup,couch_rep_sup, 'sink-disk_error',crypto_server,timer_server,crypto_sup, couch_view,cb_couch_sup,ssl_connection_sup, release_handler,couch_server_sup,couch_secondary_services, ssl_manager,couch_primary_services,overload, couch_db_update,couch_spatial_index_barrier, couch_replica_index_barrier,couch_query_servers, alarm_handler,httpd_sup,couch_set_view, couch_set_view_ddoc_cache,kernel_safe_sup,couch_config, couch_main_index_barrier,rex,inet_db,couch_task_status, couch_replication,couch_index_merger_connection_pool, 'sink-disk_default',kernel_sup,global_name_server, couch_spatial,ssl_broker_sup,couch_task_events, couch_server,couch_httpd,file_server_2,init,sasl_safe_sup, ssl_server,couch_drv,couch_uuids]}, {cookie,nocookie}, {wordsize,8}, {wall_clock,1}] [ns_server:info,2014-08-19T15:46:30.960,nonode@nohost:ns_server_cluster_sup<0.160.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "," "] [error_logger:info,2014-08-19T15:46:30.962,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.266.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:46:30.964,nonode@nohost:dist_manager<0.267.0>:dist_manager:read_address_config_from_path:83]Reading ip config from "/opt/couchbase/var/lib/couchbase/ip_start" [ns_server:info,2014-08-19T15:46:30.964,nonode@nohost:dist_manager<0.267.0>:dist_manager:read_address_config_from_path:83]Reading ip config from "/opt/couchbase/var/lib/couchbase/ip" [ns_server:info,2014-08-19T15:46:30.964,nonode@nohost:dist_manager<0.267.0>:dist_manager:init:159]ip config not found. 
Looks like we're brand new node [error_logger:info,2014-08-19T15:46:30.964,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inet_gethost_native_sup} started: [{pid,<0.269.0>},{mfa,{inet_gethost_native,init,[[]]}}] [error_logger:info,2014-08-19T15:46:30.964,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.268.0>}, {name,inet_gethost_native_sup}, {mfargs,{inet_gethost_native,start_link,[]}}, {restart_type,temporary}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:46:31.097,nonode@nohost:dist_manager<0.267.0>:dist_manager:bringup:230]Attempting to bring up net_kernel with name 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:46:31.100,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.271.0>}, {name,erl_epmd}, {mfargs,{erl_epmd,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.100,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.272.0>}, {name,auth}, {mfargs,{auth,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.101,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.273.0>}, {name,net_kernel}, {mfargs, {net_kernel,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [ns_server:info,2014-08-19T15:46:31.101,ns_1@127.0.0.1:dist_manager<0.267.0>:dist_manager:save_node:143]saving node to "/opt/couchbase/var/lib/couchbase/couchbase-server.node" [error_logger:info,2014-08-19T15:46:31.102,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_sup} started: [{pid,<0.270.0>}, {name,net_sup_dynamic}, {mfargs, {erl_distribution,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [ns_server:debug,2014-08-19T15:46:31.132,ns_1@127.0.0.1:dist_manager<0.267.0>:dist_manager:bringup:238]Attempted to save node name to disk: ok [error_logger:info,2014-08-19T15:46:31.133,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.267.0>}, {name,dist_manager}, {mfargs,{dist_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.134,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.276.0>}, {name,ns_cookie_manager}, {mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[error_logger:info,2014-08-19T15:46:31.136,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.277.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [ns_server:info,2014-08-19T15:46:31.137,ns_1@127.0.0.1:ns_config_sup<0.278.0>:ns_config_sup:init:32]loading static ns_config from "/opt/couchbase/etc/couchbase/config" [error_logger:info,2014-08-19T15:46:31.137,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.279.0>}, {name,ns_config_events}, {mfargs, {gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.137,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.280.0>}, {name,ns_config_events_local}, {mfargs, {gen_event,start_link, [{local,ns_config_events_local}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T15:46:31.150,ns_1@127.0.0.1:ns_config<0.281.0>:ns_config:load_config:795]Loading static config from "/opt/couchbase/etc/couchbase/config" [ns_server:info,2014-08-19T15:46:31.150,ns_1@127.0.0.1:ns_config<0.281.0>:ns_config:load_config:809]Loading dynamic config from "/opt/couchbase/var/lib/couchbase/config/config.dat" [ns_server:debug,2014-08-19T15:46:31.152,ns_1@127.0.0.1:ns_config<0.281.0>:ns_config:load_config:816]Here's full dynamic config we loaded: [[{cert_and_pkey, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667478}}]}| {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQPzPIoEQwCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmwlh6UM1HlSt78Xr7YCe\n18VU0sN62xbybSOxadjU2gF03Q2jgd+n84Tr9iGKtuy7DUKk/eJJQDQWcCDGTxYg\n8QNmzAlnX/eufV4rhr/9nlksMKdIlXWDvOdLX4yO1FIZ/QvGtoFWBwEc832n3sfa\n1f+EzMV8X6nZxMPV/Stc0StxJPY2Akqi99je3QsYDNvapLjSSawb2oEl8ssA4mmR\ne2P+F4r1j3FAsOsO0VOuKtmsul6utqBCmO34s0vYc6X58RbQVYx8iu5XiTFu5rTi\nFbuHeJ+rjVi4gMxuD4yVIkTJq4KED+p1SkD9H4YvUWy5O7XlmPsA30fmdMpKsZWi\n6QIDAQABowIwADALBgkqhkiG9w0BAQUDggEBADSaYJBLzwuTm8X5KVmfNhrblZTL\n3Lc/PewFJZvp3UuiF6xJQdQMO9mvLZ6MaY/Z4NL/sLionbrmQuGxxChpTwyLNL7a\n666VquUle7zrVYOJKlv/2hgFjk1rhfD0JpqwKFaRTYyMqBRG7hXkPlPZPFJVeAft\ntvYLLJc5Iou4tvQvw3lB6F3g2jpzW4UQMXKklf3c0pZqYKCNYvEt7elnIyS/Aata\nFViP8384q9BMsSeoyj/mDfV4czbAwYgZN5ZRylM+IElGWNZVBydbBQaGJgj3yJD3\n3+2X3gSf7HN33p4dPCEeNBKnL0vBdS3GPkDibxHzKv5J3euds09QGtsK4BQ=\n-----END CERTIFICATE-----\n">>, <<"*****">>}]}, {server_groups, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667474}}]}, [{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{5,63575667474}}]},2,5]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667474}}]},2,5]}, {vbucket_map_history,[{'_vclock',[{'ns_1@127.0.0.1',{1,63575667474}}]}]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667473}}]}, {cookie,alkbqedpsntmtnxa}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{7,63575667472}}]}|{2,3,0}]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {auto_failover_cfg, 
[{'_vclock',[{'ns_1@127.0.0.1',{1,63575667472}}]}, {enabled,false}, {timeout,120}, {max_nodes,1}, {count,0}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {buckets,[{configs,[]}]}, {drop_request_memory_threshold_mib,undefined}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667472}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {memory_quota,58026}, {nodes_wanted,['ns_1@127.0.0.1']}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {replication_topology,star}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{request_limit,capi},undefined}, {{request_limit,rest},undefined}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock', [{'ns_1@127.0.0.1',{3,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{3,63575667472}}]}, {moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, 
use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {{node,'ns_1@127.0.0.1',ssl_rest_port},18091}]] [ns_server:info,2014-08-19T15:46:31.154,ns_1@127.0.0.1:ns_config<0.281.0>:ns_config:load_config:827]Here's full dynamic config we loaded + static & default config: [{{node,'ns_1@127.0.0.1',ssl_rest_port},18091}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{3,63575667472}}]}, {moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock', [{'ns_1@127.0.0.1',{3,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, 
[{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{request_limit,rest},undefined}, {{request_limit,capi},undefined}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {rest_creds,[{creds,[]}]}, {rest,[{port,8091}]}, {replication_topology,star}, {replication,[{enabled,true}]}, {remote_clusters,[]}, {nodes_wanted,['ns_1@127.0.0.1']}, {memory_quota,58026}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667472}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {drop_request_memory_threshold_mib,undefined}, {buckets,[{configs,[]}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667472}}]}, {enabled,false}, {timeout,120}, {max_nodes,1}, {count,0}]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{7,63575667472}}]}|{2,3,0}]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667473}}]}, {cookie,alkbqedpsntmtnxa}]}, {vbucket_map_history,[{'_vclock',[{'ns_1@127.0.0.1',{1,63575667474}}]}]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667474}}]},2,5]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{5,63575667474}}]},2,5]}, {server_groups, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667474}}]}, [{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]]}, {cert_and_pkey, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667478}}]}| {<<"-----BEGIN 
CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQPzPIoEQwCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmwlh6UM1HlSt78Xr7YCe\n18VU0sN62xbybSOxadjU2gF03Q2jgd+n84Tr9iGKtuy7DUKk/eJJQDQWcCDGTxYg\n8QNmzAlnX/eufV4rhr/9nlksMKdIlXWDvOdLX4yO1FIZ/QvGtoFWBwEc832n3sfa\n1f+EzMV8X6nZxMPV/Stc0StxJPY2Akqi99je3QsYDNvapLjSSawb2oEl8ssA4mmR\ne2P+F4r1j3FAsOsO0VOuKtmsul6utqBCmO34s0vYc6X58RbQVYx8iu5XiTFu5rTi\nFbuHeJ+rjVi4gMxuD4yVIkTJq4KED+p1SkD9H4YvUWy5O7XlmPsA30fmdMpKsZWi\n6QIDAQABowIwADALBgkqhkiG9w0BAQUDggEBADSaYJBLzwuTm8X5KVmfNhrblZTL\n3Lc/PewFJZvp3UuiF6xJQdQMO9mvLZ6MaY/Z4NL/sLionbrmQuGxxChpTwyLNL7a\n666VquUle7zrVYOJKlv/2hgFjk1rhfD0JpqwKFaRTYyMqBRG7hXkPlPZPFJVeAft\ntvYLLJc5Iou4tvQvw3lB6F3g2jpzW4UQMXKklf3c0pZqYKCNYvEt7elnIyS/Aata\nFViP8384q9BMsSeoyj/mDfV4czbAwYgZN5ZRylM+IElGWNZVBydbBQaGJgj3yJD3\n3+2X3gSf7HN33p4dPCEeNBKnL0vBdS3GPkDibxHzKv5J3euds09QGtsK4BQ=\n-----END CERTIFICATE-----\n">>, <<"*****">>}]}] [error_logger:info,2014-08-19T15:46:31.156,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.281.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/opt/couchbase/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.157,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.283.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.158,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.284.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.159,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.286.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.159,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.278.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:31.160,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.288.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.166,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS 
REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.290.0>}, {name,diag_handler_worker}, {mfa,{work_queue,start_link,[diag_handler_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:46:31.166,ns_1@127.0.0.1:ns_server_sup<0.289.0>:dir_size:start_link:47]Starting quick version of dir_size with program name: i386-linux-godu [error_logger:info,2014-08-19T15:46:31.167,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.291.0>}, {name,dir_size}, {mfa,{dir_size,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.168,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.292.0>}, {name,request_throttler}, {mfa,{request_throttler,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.171,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.294.0>}, {name,timer2_server}, {mfargs,{timer2,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.171,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.293.0>}, {name,ns_log}, {mfa,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.172,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.295.0>}, {name,ns_crash_log_consumer}, {mfa,{ns_log,start_link_crash_consumer,[]}}, {restart_type,{permanent,4}}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:31.173,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.296.0>}, {name,ns_config_ets_dup}, {mfa,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:31.173,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [ns_server:debug,2014-08-19T15:46:31.173,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T15:46:31.173,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T15:46:31.174,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[]}] [ns_server:debug,2014-08-19T15:46:31.174,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: cert_and_pkey -> 
{<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQPzPIoEQwCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmwlh6UM1HlSt78Xr7YCe\n18VU0sN62xbybSOxadjU2gF03Q2jgd+n84Tr9iGKtuy7DUKk/eJJQDQWcCDGTxYg\n8QNmzAlnX/eufV4rhr/9nlksMKdIlXWDvOdLX4yO1FIZ/QvGtoFWBwEc832n3sfa\n1f+EzMV8X6nZxMPV/Stc0StxJPY2Akqi99je3Qs"...>>, <<"*****">>} [ns_server:debug,2014-08-19T15:46:31.174,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> [2,5] [ns_server:debug,2014-08-19T15:46:31.175,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [ns_server:debug,2014-08-19T15:46:31.175,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [ns_server:debug,2014-08-19T15:46:31.175,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [ns_server:debug,2014-08-19T15:46:31.176,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:debug,2014-08-19T15:46:31.176,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T15:46:31.176,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [ns_server:debug,2014-08-19T15:46:31.176,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: memory_quota -> 58026 [ns_server:debug,2014-08-19T15:46:31.176,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [ns_server:debug,2014-08-19T15:46:31.176,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,alkbqedpsntmtnxa}] [ns_server:debug,2014-08-19T15:46:31.176,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] [ns_server:debug,2014-08-19T15:46:31.176,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [ns_server:debug,2014-08-19T15:46:31.176,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: replication_topology -> star [ns_server:debug,2014-08-19T15:46:31.176,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [ns_server:debug,2014-08-19T15:46:31.176,ns_1@127.0.0.1:ns_config_isasl_sync<0.299.0>:ns_config_isasl_sync:init:63]isasl_sync init: ["/opt/couchbase/var/lib/couchbase/isasl.pw","_admin", "f6126ae5fac44bf3d8316165791747f2"] [ns_server:info,2014-08-19T15:46:31.178,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [ns_server:debug,2014-08-19T15:46:31.178,ns_1@127.0.0.1:ns_config_isasl_sync<0.299.0>:ns_config_isasl_sync:init:71]isasl_sync init buckets: [] 
[ns_server:debug,2014-08-19T15:46:31.178,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]] [ns_server:debug,2014-08-19T15:46:31.178,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [ns_server:debug,2014-08-19T15:46:31.178,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [] [ns_server:debug,2014-08-19T15:46:31.178,ns_1@127.0.0.1:ns_config_isasl_sync<0.299.0>:ns_config_isasl_sync:writeSASLConf:143]Writing isasl passwd file: "/opt/couchbase/var/lib/couchbase/isasl.pw" [ns_server:debug,2014-08-19T15:46:31.178,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T15:46:31.178,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2014-08-19T15:46:31.178,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T15:46:31.178,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined [ns_server:debug,2014-08-19T15:46:31.178,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:debug,2014-08-19T15:46:31.179,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T15:46:31.179,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T15:46:31.179,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T15:46:31.179,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',membership} -> active [ns_server:debug,2014-08-19T15:46:31.179,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T15:46:31.179,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T15:46:31.179,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ns_log} -> 
[{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T15:46:31.183,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T15:46:31.183,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T15:46:31.183,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_capi_port} -> 18092 [ns_server:warn,2014-08-19T15:46:31.183,ns_1@127.0.0.1:ns_config_isasl_sync<0.299.0>:ns_memcached:connect:1161]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. 
[ns_server:debug,2014-08-19T15:46:31.183,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T15:46:31.183,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T15:46:31.184,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_rest_port} -> 18091 [error_logger:info,2014-08-19T15:46:32.184,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.299.0>}, {name,ns_config_isasl_sync}, {mfa,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.184,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.303.0>}, {name,ns_log_events}, {mfa,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.185,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.305.0>}, {name,ns_node_disco_events}, {mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.187,ns_1@127.0.0.1:ns_node_disco<0.306.0>:ns_node_disco:init:103]Initting ns_node_disco with [] [ns_server:debug,2014-08-19T15:46:32.187,ns_1@127.0.0.1:ns_cookie_manager<0.276.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [user:info,2014-08-19T15:46:32.187,ns_1@127.0.0.1:ns_cookie_manager<0.276.0>:ns_cookie_manager:do_cookie_sync:130]Node 'ns_1@127.0.0.1' synchronized otp cookie alkbqedpsntmtnxa from cluster [ns_server:debug,2014-08-19T15:46:32.187,ns_1@127.0.0.1:ns_cookie_manager<0.276.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T15:46:32.216,ns_1@127.0.0.1:ns_cookie_manager<0.276.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T15:46:32.216,ns_1@127.0.0.1:<0.307.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [ns_server:debug,2014-08-19T15:46:32.217,ns_1@127.0.0.1:<0.307.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [error_logger:info,2014-08-19T15:46:32.217,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.306.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.218,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS 
REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.309.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.219,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.310.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.220,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.311.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.221,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:init:66]init pulling [ns_server:debug,2014-08-19T15:46:32.221,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:init:68]init pushing [ns_server:debug,2014-08-19T15:46:32.221,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:init:72]init reannouncing [ns_server:debug,2014-08-19T15:46:32.222,ns_1@127.0.0.1:ns_config_events<0.279.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [ns_server:debug,2014-08-19T15:46:32.222,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [ns_server:debug,2014-08-19T15:46:32.222,ns_1@127.0.0.1:ns_config_events<0.279.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [ns_server:debug,2014-08-19T15:46:32.222,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T15:46:32.222,ns_1@127.0.0.1:ns_cookie_manager<0.276.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T15:46:32.222,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T15:46:32.222,ns_1@127.0.0.1:ns_cookie_manager<0.276.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T15:46:32.222,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[]}] [error_logger:info,2014-08-19T15:46:32.223,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.312.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.223,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: cert_and_pkey -> {<<"-----BEGIN 
CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQPzPIoEQwCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmwlh6UM1HlSt78Xr7YCe\n18VU0sN62xbybSOxadjU2gF03Q2jgd+n84Tr9iGKtuy7DUKk/eJJQDQWcCDGTxYg\n8QNmzAlnX/eufV4rhr/9nlksMKdIlXWDvOdLX4yO1FIZ/QvGtoFWBwEc832n3sfa\n1f+EzMV8X6nZxMPV/Stc0StxJPY2Akqi99je3Qs"...>>, <<"*****">>} [ns_server:debug,2014-08-19T15:46:32.223,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> [2,5] [error_logger:info,2014-08-19T15:46:32.223,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.304.0>}, {name,ns_node_disco_sup}, {mfa,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T15:46:32.223,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([alert_limits,auto_failover_cfg,autocompaction, buckets,cert_and_pkey,cluster_compat_version, drop_request_memory_threshold_mib, dynamic_config_version,email_alerts, fast_warmup,index_aware_rebalance_disabled, max_bucket_count,memory_quota,nodes_wanted,otp, remote_clusters,replication, replication_topology,rest,rest_creds, server_groups,set_view_update_daemon, vbucket_map_history, {couchdb,max_parallel_indexers}, {couchdb,max_parallel_replica_indexers}, {request_limit,capi}, {request_limit,rest}, {node,'ns_1@127.0.0.1',capi_port}, {node,'ns_1@127.0.0.1',compaction_daemon}, {node,'ns_1@127.0.0.1',config_version}, {node,'ns_1@127.0.0.1',isasl}, {node,'ns_1@127.0.0.1',membership}, {node,'ns_1@127.0.0.1',memcached}, {node,'ns_1@127.0.0.1',moxi}, {node,'ns_1@127.0.0.1',ns_log}, {node,'ns_1@127.0.0.1',port_servers}, {node,'ns_1@127.0.0.1',rest}, {node,'ns_1@127.0.0.1',ssl_capi_port}, {node,'ns_1@127.0.0.1', ssl_proxy_downstream_port}, {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port}, {node,'ns_1@127.0.0.1',ssl_rest_port}]..) 
[ns_server:debug,2014-08-19T15:46:32.223,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [ns_server:debug,2014-08-19T15:46:32.223,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [ns_server:debug,2014-08-19T15:46:32.224,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [error_logger:info,2014-08-19T15:46:32.224,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.318.0>}, {name,vbucket_map_mirror}, {mfa,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.224,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:debug,2014-08-19T15:46:32.224,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T15:46:32.224,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [ns_server:debug,2014-08-19T15:46:32.224,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: memory_quota -> 58026 [ns_server:debug,2014-08-19T15:46:32.225,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [ns_server:debug,2014-08-19T15:46:32.225,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,alkbqedpsntmtnxa}] [error_logger:info,2014-08-19T15:46:32.225,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.320.0>}, {name,bucket_info_cache}, {mfa,{bucket_info_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.225,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] [ns_server:debug,2014-08-19T15:46:32.226,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [error_logger:info,2014-08-19T15:46:32.225,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.323.0>}, {name,ns_tick_event}, {mfa,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.226,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: replication_topology -> star [ns_server:debug,2014-08-19T15:46:32.226,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: rest -> 
[{port,8091}] [ns_server:info,2014-08-19T15:46:32.226,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [error_logger:info,2014-08-19T15:46:32.226,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.324.0>}, {name,buckets_events}, {mfa,{gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.226,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]] [ns_server:debug,2014-08-19T15:46:32.226,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [ns_server:debug,2014-08-19T15:46:32.226,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [] [ns_server:debug,2014-08-19T15:46:32.226,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T15:46:32.226,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2014-08-19T15:46:32.226,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T15:46:32.227,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined [ns_server:debug,2014-08-19T15:46:32.227,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:debug,2014-08-19T15:46:32.227,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T15:46:32.227,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T15:46:32.227,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T15:46:32.227,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',membership} -> active [ns_server:debug,2014-08-19T15:46:32.227,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] 
[ns_server:debug,2014-08-19T15:46:32.227,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T15:46:32.227,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T15:46:32.228,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T15:46:32.228,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T15:46:32.228,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T15:46:32.228,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T15:46:32.228,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T15:46:32.228,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T15:46:32.258,ns_1@127.0.0.1:ns_cookie_manager<0.276.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T15:46:32.258,ns_1@127.0.0.1:ns_cookie_manager<0.276.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T15:46:32.258,ns_1@127.0.0.1:<0.315.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [ns_server:debug,2014-08-19T15:46:32.259,ns_1@127.0.0.1:ns_cookie_manager<0.276.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to 
"/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T15:46:32.259,ns_1@127.0.0.1:<0.315.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [ns_server:debug,2014-08-19T15:46:32.259,ns_1@127.0.0.1:ns_log_events<0.303.0>:ns_mail_log:init:44]ns_mail_log started up [error_logger:info,2014-08-19T15:46:32.259,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.326.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.260,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.325.0>}, {name,ns_mail_sup}, {mfa,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:32.260,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.327.0>}, {name,ns_stats_event}, {mfa,{gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.261,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.328.0>}, {name,samples_loader_tasks}, {mfa,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.285,ns_1@127.0.0.1:ns_cookie_manager<0.276.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [error_logger:info,2014-08-19T15:46:32.285,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.329.0>}, {name,ns_heart}, {mfa,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.285,ns_1@127.0.0.1:<0.316.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [ns_server:debug,2014-08-19T15:46:32.285,ns_1@127.0.0.1:<0.316.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [error_logger:info,2014-08-19T15:46:32.287,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.333.0>}, {name,ns_doctor}, {mfa,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.288,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.331.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} 
[ns_server:info,2014-08-19T15:46:32.292,ns_1@127.0.0.1:remote_clusters_info<0.336.0>:remote_clusters_info:read_or_create_table:540]Reading remote_clusters_info content from /opt/couchbase/var/lib/couchbase/remote_clusters_cache_v3 [error_logger:info,2014-08-19T15:46:32.295,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.336.0>}, {name,remote_clusters_info}, {mfa,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.295,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.339.0>}, {name,master_activity_events}, {mfa, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.296,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.331.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}, {proc_lib,init_p_do_apply,3}]} [ns_server:debug,2014-08-19T15:46:32.298,ns_1@127.0.0.1:ns_server_sup<0.289.0>:mb_master:check_master_takeover_needed:141]Sending master node question to the following nodes: [] [ns_server:debug,2014-08-19T15:46:32.298,ns_1@127.0.0.1:ns_server_sup<0.289.0>:mb_master:check_master_takeover_needed:143]Got replies: [] [ns_server:debug,2014-08-19T15:46:32.298,ns_1@127.0.0.1:ns_server_sup<0.289.0>:mb_master:check_master_takeover_needed:149]Was unable to discover master, not going to force mastership takeover [user:info,2014-08-19T15:46:32.300,ns_1@127.0.0.1:mb_master<0.342.0>:mb_master:init:86]I'm the only node, so I'm the master. [ns_server:debug,2014-08-19T15:46:32.304,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.331.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [ns_server:debug,2014-08-19T15:46:32.305,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.331.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}]} [ns_server:debug,2014-08-19T15:46:32.311,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [ns_server:debug,2014-08-19T15:46:32.311,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([dynamic_config_version]..) 
[ns_server:debug,2014-08-19T15:46:32.311,ns_1@127.0.0.1:mb_master_sup<0.347.0>:misc:start_singleton:986]start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.349.0> on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:46:32.311,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.349.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.313,ns_1@127.0.0.1:mb_master_sup<0.347.0>:misc:start_singleton:986]start_singleton(gen_server, ns_tick, [], []): started as <0.352.0> on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:46:32.313,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.352.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.314,ns_1@127.0.0.1:<0.353.0>:auto_failover:init:134]init auto_failover. [ns_server:debug,2014-08-19T15:46:32.315,ns_1@127.0.0.1:mb_master_sup<0.347.0>:misc:start_singleton:986]start_singleton(gen_server, auto_failover, [], []): started as <0.353.0> on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:46:32.315,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.353.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.315,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.342.0>}, {name,mb_master}, {mfa,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:32.315,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.354.0>}, {name,master_activity_events_ingress}, {mfa, {gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.315,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.355.0>}, {name,master_activity_events_timestamper}, {mfa, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.343,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.356.0>}, {name,master_activity_events_pids_watcher}, {mfa, {master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.360,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] 
=========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.357.0>}, {name,master_activity_events_keeper}, {mfa,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.393,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.361.0>}, {name,ns_ssl_services_setup}, {mfargs,{ns_ssl_services_setup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.395,ns_1@127.0.0.1:ns_ssl_services_setup<0.361.0>:ns_ssl_services_setup:restart_xdcr_proxy:201]Xdcr proxy restart failed. But that's usually normal. {'EXIT', {{badmatch, {badrpc, {'EXIT', {{case_clause, false}, [{ns_child_ports_sup, restart_port_by_name, 1}, {rpc, '-handle_call_call/6-fun-0-', 5}]}}}}, [{ns_ports_setup, restart_xdcr_proxy, 0}, {ns_ssl_services_setup, restart_xdcr_proxy, 0}, {ns_ssl_services_setup, init,1}, {gen_server,init_it, 6}, {proc_lib, init_p_do_apply, 3}]}} [error_logger:info,2014-08-19T15:46:32.414,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.363.0>}, {name,ns_rest_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_rest_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.415,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.380.0>}, {name,ns_capi_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_capi_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.415,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.360.0>}, {name,ns_ssl_services_sup}, {mfargs,{ns_ssl_services_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:32.416,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.397.0>}, {name,menelaus_ui_auth}, {mfargs,{menelaus_ui_auth,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.417,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.398.0>}, {name,menelaus_web_cache}, {mfargs,{menelaus_web_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.418,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.399.0>}, {name,menelaus_stats_gatherer}, {mfargs,{menelaus_stats_gatherer,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, 
{child_type,worker}] [error_logger:info,2014-08-19T15:46:32.418,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.400.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.419,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.417.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.420,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.418.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.424,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.419.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [user:info,2014-08-19T15:46:32.424,ns_1@127.0.0.1:ns_server_sup<0.289.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. [error_logger:info,2014-08-19T15:46:32.424,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.359.0>}, {name,menelaus}, {mfa,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:32.425,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.421.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.426,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.422.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}] [ns_server:info,2014-08-19T15:46:32.426,ns_1@127.0.0.1:<0.423.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213 [error_logger:info,2014-08-19T15:46:32.426,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.423.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.427,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: 
{local,ns_server_sup} started: [{pid,<0.420.0>}, {name,mc_sup}, {mfa,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:32.427,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.424.0>}, {name,ns_ports_setup}, {mfa,{ns_ports_setup,start,[]}}, {restart_type,{permanent,4}}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.427,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.425.0>}, {name,ns_port_memcached_killer}, {mfa,{ns_ports_setup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T15:46:32.428,ns_1@127.0.0.1:<0.427.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/opt/couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms [error_logger:info,2014-08-19T15:46:32.428,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.427.0>}, {name,ns_memcached_log_rotator}, {mfa,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.431,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.429.0>}, {name,memcached_clients_pool}, {mfa,{memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.434,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.430.0>}, {name,proxied_memcached_clients_pool}, {mfa,{proxied_memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.434,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.431.0>}, {name,xdc_lhttpc_pool}, {mfa, {lhttpc_manager,start_link, [[{name,xdc_lhttpc_pool}, {connection_timeout,120000}, {pool_size,200}]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.434,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.432.0>}, {name,ns_null_connection_pool}, {mfa, {ns_null_connection_pool,start_link, [ns_null_connection_pool]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.435,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.433.0>}, {name,xdc_replication_sup}, {mfa,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, 
{shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:32.436,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.434.0>}, {name,xdc_rep_manager}, {mfa,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.437,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.436.0>}, {name,ns_memcached_sockets_pool}, {mfa,{ns_memcached_sockets_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.439,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.439.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.440,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.441.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.440,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.440.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:32.441,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.438.0>}, {name,ns_bucket_worker_sup}, {mfa,{ns_bucket_worker_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:32.441,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.442.0>}, {name,system_stats_collector}, {mfa,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.442,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.445.0>}, {name,{stats_archiver,"@system"}}, {mfa,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.442,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.447.0>}, {name,{stats_reader,"@system"}}, {mfa,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, 
{child_type,worker}] [error_logger:info,2014-08-19T15:46:32.446,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.448.0>}, {name,compaction_daemon}, {mfa,{compaction_daemon,start_link,[]}}, {restart_type,{permanent,4}}, {shutdown,86400000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:46:32.447,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:46:32.447,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:46:32.454,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.450.0>:xdc_rdoc_replication_srv:init:76]Loaded the following docs: [] [ns_server:debug,2014-08-19T15:46:32.454,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.450.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [error_logger:info,2014-08-19T15:46:32.454,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.450.0>}, {name,xdc_rdoc_replication_srv}, {mfa,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:46:32.455,ns_1@127.0.0.1:set_view_update_daemon<0.452.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000 [ns_server:debug,2014-08-19T15:46:32.455,ns_1@127.0.0.1:<0.2.0>:child_erlang:child_loop:104]Entered child_loop [error_logger:info,2014-08-19T15:46:32.455,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.452.0>}, {name,set_view_update_daemon}, {mfa,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:46:32.455,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.289.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:46:32.455,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: ns_server started_at: 'ns_1@127.0.0.1' [ns_server:debug,2014-08-19T15:47:02.448,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:47:02.448,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:47:32.449,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:47:32.449,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T15:48:02.450,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:48:02.450,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:48:32.451,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:48:32.451,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:49:02.452,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:49:02.452,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:49:32.453,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:49:32.453,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:50:02.454,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:50:02.454,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:50:32.455,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:50:32.455,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:51:02.456,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:51:02.456,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:51:32.457,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:51:32.457,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:52:02.458,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:52:02.458,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:52:32.459,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:52:32.459,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T15:53:02.460,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:53:02.460,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:53:32.461,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:53:32.461,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:54:02.462,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:54:02.462,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:54:32.463,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:54:32.463,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:55:02.464,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:55:02.464,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:55:32.465,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:55:32.465,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:56:02.466,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:56:02.466,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:56:32.467,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:56:32.467,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:57:02.468,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:57:02.468,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:57:32.469,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:57:32.469,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T15:58:02.470,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:58:02.470,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:58:32.471,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:58:32.471,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:59:02.472,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:59:02.472,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:59:32.473,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:59:32.473,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:00:02.474,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:00:02.474,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:00:32.510,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:00:32.511,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:01:02.512,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:01:02.512,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:01:32.513,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:01:32.513,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:02:02.514,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:02:02.514,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:02:32.519,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:02:32.519,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:03:02.520,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:03:02.520,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:03:32.521,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:03:32.521,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:04:02.522,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:04:02.522,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:04:32.523,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:04:32.523,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:05:02.524,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:05:02.524,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:05:32.525,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:05:32.525,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:06:02.526,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:06:02.526,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:06:32.528,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:06:32.528,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:07:02.529,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:07:02.529,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:07:32.530,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:07:32.530,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:08:02.531,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:08:02.531,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:08:32.532,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:08:32.532,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:09:02.533,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:09:02.533,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:09:32.534,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:09:32.534,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:10:02.535,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:10:02.535,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:10:32.536,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:10:32.537,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:11:02.538,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:11:02.538,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:11:32.539,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:11:32.539,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:12:02.540,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:12:02.540,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:12:32.546,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:12:32.546,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:13:02.547,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:13:02.547,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:13:32.548,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:13:32.548,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:14:02.549,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:14:02.549,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:14:32.550,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:14:32.550,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:15:02.551,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:15:02.551,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:15:32.552,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:15:32.552,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:16:02.553,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:16:02.553,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:16:32.554,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:16:32.554,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:17:02.555,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:17:02.555,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:17:32.556,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:17:32.556,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:18:02.557,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:18:02.557,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:18:32.558,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:18:32.558,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:19:02.559,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:19:02.559,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:19:32.560,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:19:32.560,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:20:02.561,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:20:02.561,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:20:32.562,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:20:32.562,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:21:02.563,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:21:02.563,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:21:32.564,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:21:32.564,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:22:02.565,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:22:02.565,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:22:32.566,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:22:32.566,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:23:02.567,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:23:02.567,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:23:32.568,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:23:32.568,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:24:02.569,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:24:02.569,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:24:32.570,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:24:32.570,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:25:02.571,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:25:02.571,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:25:32.572,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:25:32.572,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:26:02.573,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:26:02.573,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:26:32.574,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:26:32.574,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:27:02.575,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:27:02.575,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:27:32.576,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:27:32.576,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:28:02.577,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:28:02.577,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:28:32.578,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:28:32.578,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:29:02.579,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:29:02.579,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:29:32.580,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:29:32.580,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:30:02.581,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:30:02.581,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:30:32.582,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:30:32.582,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:31:02.583,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:31:02.583,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:31:32.584,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:31:32.584,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:32:02.585,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:32:02.585,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:32:32.586,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:32:32.586,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:33:02.587,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:33:02.587,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:33:32.588,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:33:32.588,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:34:02.589,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:34:02.589,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:34:32.590,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:34:32.590,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:35:02.591,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:35:02.591,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:35:32.592,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:35:32.592,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:36:02.593,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:36:02.593,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:36:32.594,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:36:32.594,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:37:02.595,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:37:02.595,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:37:32.596,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:37:32.596,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:38:02.597,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:38:02.597,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:38:32.598,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:38:32.598,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:39:02.599,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:39:02.599,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:39:32.600,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:39:32.600,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:40:02.601,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:40:02.601,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:40:32.602,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:40:32.602,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:41:02.603,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:41:02.603,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:41:32.604,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:41:32.604,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:42:02.605,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:42:02.605,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:42:32.606,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:42:32.606,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:43:02.607,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:43:02.607,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:43:32.608,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:43:32.608,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:44:02.609,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:44:02.609,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:44:32.610,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:44:32.610,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:45:02.611,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:45:02.611,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:45:32.612,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:45:32.612,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:46:02.613,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:46:02.613,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:46:32.621,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:46:32.621,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:47:02.622,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:47:02.622,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:47:32.623,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:47:32.623,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:48:02.624,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:48:02.624,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:48:32.625,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:48:32.625,ns_1@127.0.0.1:compaction_daemon<0.448.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:48:43.418,ns_1@127.0.0.1:ns_config_log<0.284.0>:ns_config_log:log_common:138]config change: uuid -> <<"7470311bdaa2a4acd47d21222af5c9ae">> [ns_server:debug,2014-08-19T16:48:43.418,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([uuid]..) [user:info,2014-08-19T16:48:58.464,ns_1@127.0.0.1:<0.10867.0>:ns_storage_conf:setup_disk_storage_conf:116]Setting database directory path to /var/lib/pgsql and index directory path to /var/lib/pgsql [ns_server:info,2014-08-19T16:48:58.465,ns_1@127.0.0.1:<0.10867.0>:ns_storage_conf:setup_disk_storage_conf:124]Removing all the buckets because database path has changed (old database path /opt/couchbase/var/lib/couchbase/data) [ns_server:info,2014-08-19T16:48:58.465,ns_1@127.0.0.1:<0.10867.0>:ns_storage_conf:setup_disk_storage_conf:130]Removing all unused database files [ns_server:debug,2014-08-19T16:48:58.473,ns_1@127.0.0.1:<0.453.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.452.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:48:58.473,ns_1@127.0.0.1:<0.449.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.448.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:48:58.776,ns_1@127.0.0.1:<0.446.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_stats_event,<0.445.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:48:58.777,ns_1@127.0.0.1:<0.444.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_tick_event,<0.442.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:48:58.777,ns_1@127.0.0.1:<0.441.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.440.0>} exited with reason shutdown [error_logger:error,2014-08-19T16:48:58.777,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_bucket_sup} Context: shutdown_error Reason: normal Offender: [{pid,<0.441.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:48:58.777,ns_1@127.0.0.1:<0.426.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.424.0>} exited with reason killed [ns_server:debug,2014-08-19T16:48:58.777,ns_1@127.0.0.1:<0.428.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.425.0>} exited with reason killed [ns_server:debug,2014-08-19T16:48:58.778,ns_1@127.0.0.1:<0.362.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.361.0>} exited with reason shutdown 
[ns_server:info,2014-08-19T16:48:58.778,ns_1@127.0.0.1:mb_master<0.342.0>:mb_master:terminate:299]Synchronously shutting down child mb_master_sup [ns_server:debug,2014-08-19T16:48:58.778,ns_1@127.0.0.1:<0.358.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {master_activity_events,<0.357.0>} exited with reason killed [ns_server:debug,2014-08-19T16:48:58.778,ns_1@127.0.0.1:<0.343.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.342.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:48:58.779,ns_1@127.0.0.1:<0.334.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.333.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:48:58.779,ns_1@127.0.0.1:<0.330.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {buckets_events,<0.329.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:48:58.779,ns_1@127.0.0.1:<0.322.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.320.0>} exited with reason killed [ns_server:debug,2014-08-19T16:48:58.779,ns_1@127.0.0.1:<0.319.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.318.0>} exited with reason killed [ns_server:debug,2014-08-19T16:48:58.779,ns_1@127.0.0.1:<0.313.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events_local,<0.312.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:48:58.779,ns_1@127.0.0.1:<0.301.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.299.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:48:58.779,ns_1@127.0.0.1:<0.297.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.296.0>} exited with reason killed [error_logger:error,2014-08-19T16:48:58.781,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: gen_event:init_it/6 pid: <0.321.0> registered_name: bucket_info_cache_invalidations exception exit: killed in function gen_event:terminate_server/4 ancestors: [bucket_info_cache,ns_server_sup,ns_server_cluster_sup, <0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 233 stack_size: 24 reductions: 119 neighbours: [ns_server:debug,2014-08-19T16:48:58.882,ns_1@127.0.0.1:ns_config<0.281.0>:ns_config:wait_saver:652]Done waiting for saver. 
[ns_server:debug,2014-08-19T16:48:58.882,ns_1@127.0.0.1:<0.287.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.286.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:48:58.882,ns_1@127.0.0.1:<0.285.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.284.0>} exited with reason shutdown [error_logger:error,2014-08-19T16:48:58.882,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_server_cluster_sup} Context: shutdown_error Reason: killed Offender: [{pid,<0.288.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [error_logger:error,2014-08-19T16:48:58.883,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:119]** Generic server <0.220.0> terminating ** Last message in was {'EXIT',<0.210.0>,killed} ** When Server state == {db,<0.220.0>,<0.221.0>,nil,<<"1408448790897241">>, <0.217.0>,<0.222.0>, {db_header,11,1, <<0,0,0,0,13,103,0,0,0,0,0,51,0,0,0,0,1,0,0,0, 0,0,0,0,0,0,13,69>>, <<0,0,0,0,13,154,0,0,0,0,0,49,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.217.0>, {3431, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>, 51}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.217.0>, {3482,<<0,0,0,0,1>>,49}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.217.0>,nil,identity,identity, #Fun,nil,1279,2558, true}, 1,<<"_users">>, "/opt/couchbase/var/lib/couchbase/data/_users.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2014-08-19T16:48:58.884,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:119]** Generic server <0.214.0> terminating ** Last message in was {'EXIT',<0.210.0>,killed} ** When Server state == {db,<0.214.0>,<0.215.0>,nil,<<"1408448790891054">>, <0.211.0>,<0.216.0>, {db_header,11,0,nil,nil,nil,0,nil,nil}, 0, {btree,<0.211.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.211.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.211.0>,nil,identity,identity, #Fun,nil,1279,2558, true}, 0,<<"_replicator">>, "/opt/couchbase/var/lib/couchbase/data/_replicator.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2014-08-19T16:48:58.884,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.214.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 610 stack_size: 24 reductions: 249 neighbours: [error_logger:error,2014-08-19T16:48:58.885,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.220.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: 
running heap_size: 610 stack_size: 24 reductions: 210 neighbours: [error_logger:info,2014-08-19T16:48:58.885,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================INFO REPORT========================= application: mapreduce exited: stopped type: temporary [error_logger:info,2014-08-19T16:48:58.885,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================INFO REPORT========================= application: couch_view_parser exited: stopped type: temporary [error_logger:info,2014-08-19T16:48:58.885,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================INFO REPORT========================= application: couch_index_merger exited: stopped type: temporary [error_logger:info,2014-08-19T16:48:58.885,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================INFO REPORT========================= application: couch_set_view exited: stopped type: temporary [error_logger:info,2014-08-19T16:48:58.885,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_view_parser started_at: 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T16:48:58.885,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_set_view started_at: 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T16:48:58.885,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_index_merger started_at: 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T16:48:58.886,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: mapreduce started_at: 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T16:48:58.911,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.10932.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/default.d/capi.ini", "/opt/couchbase/etc/couchdb/default.d/geocouch.ini", "/opt/couchbase/etc/couchdb/local.ini"], <0.10932.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:48:58.917,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.10935.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:48:58.918,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.10936.0>}, {name,couch_task_events}, {mfargs, {gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:48:58.918,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] 
=========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.10937.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:48:58.918,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.10938.0>}, {name,couch_file_write_guard}, {mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.551,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.10939.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.552,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17108.0>}, {name,couch_db_update_event}, {mfargs, {gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.552,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17109.0>}, {name,couch_replication_event}, {mfargs, {gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.552,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17110.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:02.552,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17111.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.552,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17112.0>}, {name,couch_main_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_main_index_barrier, "max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.553,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17113.0>}, {name,couch_replica_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_replica_index_barrier, 
"max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.553,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17114.0>}, {name,couch_spatial_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_spatial_index_barrier, "max_parallel_spatial_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.553,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.10934.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:02.553,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17116.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:02.554,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17117.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.554,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17119.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.555,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17121.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.555,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17123.0>}, {name,index_merger_pool}, {mfargs, {lhttpc_manager,start_link, [[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.555,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17124.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
[error_logger:info,2014-08-19T16:49:02.555,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17126.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.555,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17128.0>}, {name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.556,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17130.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.557,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17147.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.557,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.17115.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:02.557,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,cb_couch_sup} started: [{pid,<0.10933.0>}, {name,couch_app}, {mfargs, {couch_app,start, [fake, ["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2014-08-19T16:49:02.560,ns_1@127.0.0.1:ns_server_cluster_sup<0.160.0>:log_os_info:start_link:25]OS type: {unix,linux} Version: {2,6,32} Runtime info: [{otp_release,"R14B04"}, {erl_version,"5.8.5"}, {erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:24:24] [rq:24] [async-threads:16] [kernel-poll:true]\n"}, {system_arch_raw,"x86_64-unknown-linux-gnu"}, {system_arch,"x86_64-unknown-linux-gnu"}, {localtime,{{2014,8,19},{16,49,2}}}, {memory, [{total,601493728}, {processes,39621448}, {processes_used,39614480}, {system,561872280}, {atom,1158025}, {atom_used,1136631}, {binary,434880}, {code,10957303}, {ets,2290232}]}, {loaded, [lib,capi_utils,mochiweb_mime,mochiweb_io,stats_collector, menelaus_web_remote_clusters,mb_grid,ejson, mochiweb_response,menelaus_web_buckets,menelaus_auth, mochiweb_util,mochiweb_request,mochiweb_headers, set_view_update_daemon,xdc_rdoc_replication_srv, compaction_daemon,stats_archiver,ns_bucket_sup, ns_bucket_worker_sup,couch_changes, ns_memcached_sockets_pool,xdc_rep_manager, ns_null_connection_pool,proxied_memcached_clients_pool, 
ns_moxi_sup,ns_connection_pool,memcached_clients_pool, ns_cluster_membership,ns_memcached_log_rotator, mc_tcp_listener,mc_conn_sup,mc_sup, menelaus_web_alerts_srv,hot_keys_keeper,menelaus_event, menelaus_stats_gatherer,menelaus_web_cache, menelaus_ui_auth,ssl_tls1,ssl_cipher,ssl_record,mochiweb, menelaus_util,menelaus_web,ns_ports_setup,ns_server_cert, ns_ssl_services_setup,ns_ssl_services_sup,menelaus_sup, ringbuffer,master_activity_events_keeper, master_activity_events_pids_watcher,auto_failover,ns_tick, ns_online_config_upgrader,ns_orchestrator, master_activity_events,system_stats_collector, mb_master_sup,failover_safeness_level,gen_fsm,mb_master, xdc_replication_sup,ns_bucket,remote_clusters_info, stats_reader,ns_doctor,ns_heart,samples_loader_tasks, ns_mail_log,ns_mail_sup,bucket_info_cache, vbucket_map_mirror,ns_node_disco_rep_events,ns_config_rep, ns_node_disco_conf_events,ns_node_disco_log,net_adm, cluster_compat_mode,ns_node_disco,ns_node_disco_sup, ns_memcached,dist_util,ns_config_isasl_sync,ns_crash_log, ns_config_ets_dup,random,timer2,ns_log,request_throttler, menelaus_deps,dir_size,work_queue,supervisor2, ns_server_sup,ns_process_registry,cb_config_couch_sync, ns_pubsub,ns_config_replica,ns_config_log,vclock, ns_storage_conf,ns_config_default,ns_config,ns_config_sup, ns_cluster,ns_cookie_manager,erl_epmd,inet_tcp_dist, gen_udp,dist_manager,timeout_diag_logger,path_config, diag_handler,auth,ns_info,log_os_info,couch_config_writer, cb_init_loggers,couch_uuids,mochiweb_acceptor,inet_tcp, gen_tcp,mochiweb_socket,mochiweb_socket_server,mochilists, mochiweb_http,eval_bits,couch_httpd,couch_view, couch_set_view_ddoc_cache,couch_query_servers, couch_spatial,mapreduce,couch_set_view, couch_db_update_notifier,snappy,couch_compress, couch_auth_cache,couch_db_update_notifier_sup, couch_secondary_sup,queue,couch_index_barrier, couch_event_sup,couch_log,couch_rep_sup,couch_btree, couch_ref_counter,couch_db_updater,couch_db,httpd_util, filelib,couch_file,couch_file_write_guard, couch_task_status,erl_ddll,couch_drv,couch_primary_sup, couch_server,string,re,file2,couch_util,couch_config, couch_server_sup,ssl_server,crypto,ssl,lhttpc_manager, lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, mlockall,calendar,ale_default_formatter,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views','ale_logger-cluster',timer, io_lib_fread,'ale_logger-rebalance','ale_logger-stats', 'ale_logger-ns_doctor','ale_logger-menelaus', 'ale_logger-user','ale_logger-ns_server', 'ale_logger-couchdb',ns_log_sink,disk_log_sup, disk_log_server,disk_log_1,disk_log,ale_disk_sink, ns_server,cpu_sup,memsup,disksup,os_mon,io, release_handler,overload,alarm_handler,log_mf_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal,sets, ordsets,erl_lint,compile,dynamic_compile,ale_utils, 
io_lib_pretty,io_lib_format,io_lib,ale_codegen,dict,ale, ale_dynamic_sup,ale_sup,ale_app,ns_bootstrap,child_erlang, file_io_server,orddict,erl_eval,file,c,kernel_config, user_sup,supervisor_bridge,standard_error,unicode,binary, ets,gb_sets,hipe_unified_loader,packages,code_server,code, file_server,net_kernel,global_group,erl_distribution, filename,inet_gethost_native,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{public_key,"Public key infrastructure","0.13"}, {asn1,"The Erlang ASN1 compiler version 1.6.18","1.6.18"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8ca6d2a"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set views","1.2.0a-a425d97-git"}, {compiler,"ERTS CXC 138 10","4.7.5"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache CouchDB","1.2.0a-a425d97-git"}, {mapreduce,"MapReduce using V8 JavaScript engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-a425d97-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.5.1-1083-rel-enterprise"}, {mochiweb,"MochiMedia Web Server","2.4.2"}, {syntax_tools,"Syntax tools","1.6.7.1"}, {xmerl,"XML parser","1.2.10"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,6319}, {node,'ns_1@127.0.0.1'}, {nodes,[]}, {registered, [disk_log_sup,disk_log_server,httpc_sup,ssl_broker_sup, code_server,httpc_profile_sup,couch_set_view_ddoc_cache, httpc_manager,ssl_server,inet_gethost_native_sup, httpc_handler_sup,ssl_sup,application_controller, couch_index_merger_connection_pool,ftp_sup,couch_spatial, standard_error_sup,inets_sup,crypto_server,crypto_sup, couch_secondary_services,couch_primary_services, couch_db_update,couch_config,error_logger,couch_server, couch_uuids,'sink-disk_default',os_mon_sup,cpu_sup,memsup, disksup,kernel_safe_sup,auth,couch_db_update_notifier_sup, dist_manager,couch_log,couch_auth_cache,couch_rep_sup, sasl_safe_sup,couch_view,couch_server_sup,cb_couch_sup, timer_server,couch_query_servers,couch_task_status, couch_httpd,couch_drv,rex,couch_file_write_guard,net_sup, kernel_sup,global_name_server,sasl_sup,net_kernel, file_server_2,release_handler,overload,alarm_handler, ale_sup,ale_dynamic_sup,lhttpc_sup,ale, couch_spatial_index_barrier,couch_replica_index_barrier, couch_main_index_barrier,couch_set_view,couch_replication, couch_task_events,lhttpc_manager,timer2_server,tftp_sup, ns_server_cluster_sup,standard_error,erl_prim_loader, inet_gethost_native,init,inet_db,httpd_sup,'sink-ns_log', 'sink-disk_stats','sink-disk_xdcr_errors', 'sink-disk_xdcr','sink-disk_debug','sink-disk_couchdb', 'sink-disk_mapreduce_errors','sink-disk_views', global_group,'sink-disk_error',ssl_connection_sup, ssl_manager,erl_epmd]}, {cookie,alkbqedpsntmtnxa}, {wordsize,8}, {wall_clock,3753}] [ns_server:info,2014-08-19T16:49:02.562,ns_1@127.0.0.1:ns_server_cluster_sup<0.160.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", 
" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "," "] [error_logger:info,2014-08-19T16:49:02.563,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.17149.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:02.564,ns_1@127.0.0.1:ns_config_sup<0.17152.0>:ns_config_sup:init:32]loading static ns_config from "/opt/couchbase/etc/couchbase/config" [error_logger:info,2014-08-19T16:49:02.564,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.17150.0>}, {name,ns_cookie_manager}, {mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.564,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.17151.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.564,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.17153.0>}, {name,ns_config_events}, {mfargs, {gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.564,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.17154.0>}, {name,ns_config_events_local}, {mfargs, {gen_event,start_link, [{local,ns_config_events_local}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:02.565,ns_1@127.0.0.1:ns_config<0.17155.0>:ns_config:load_config:795]Loading static config from "/opt/couchbase/etc/couchbase/config" [ns_server:info,2014-08-19T16:49:02.566,ns_1@127.0.0.1:ns_config<0.17155.0>:ns_config:load_config:809]Loading dynamic config from "/opt/couchbase/var/lib/couchbase/config/config.dat" [ns_server:debug,2014-08-19T16:49:02.566,ns_1@127.0.0.1:ns_config<0.17155.0>:ns_config:load_config:816]Here's full dynamic config we loaded: [[{uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575671723}}]}| <<"7470311bdaa2a4acd47d21222af5c9ae">>]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{5,63575667474}}]},2,5]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667472}}]}, {enabled,false}, {timeout,120}, {max_nodes,1}, {count,0}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {buckets,[{configs,[]}]}, {cert_and_pkey, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667478}}]}| {<<"-----BEGIN 
CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQPzPIoEQwCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmwlh6UM1HlSt78Xr7YCe\n18VU0sN62xbybSOxadjU2gF03Q2jgd+n84Tr9iGKtuy7DUKk/eJJQDQWcCDGTxYg\n8QNmzAlnX/eufV4rhr/9nlksMKdIlXWDvOdLX4yO1FIZ/QvGtoFWBwEc832n3sfa\n1f+EzMV8X6nZxMPV/Stc0StxJPY2Akqi99je3QsYDNvapLjSSawb2oEl8ssA4mmR\ne2P+F4r1j3FAsOsO0VOuKtmsul6utqBCmO34s0vYc6X58RbQVYx8iu5XiTFu5rTi\nFbuHeJ+rjVi4gMxuD4yVIkTJq4KED+p1SkD9H4YvUWy5O7XlmPsA30fmdMpKsZWi\n6QIDAQABowIwADALBgkqhkiG9w0BAQUDggEBADSaYJBLzwuTm8X5KVmfNhrblZTL\n3Lc/PewFJZvp3UuiF6xJQdQMO9mvLZ6MaY/Z4NL/sLionbrmQuGxxChpTwyLNL7a\n666VquUle7zrVYOJKlv/2hgFjk1rhfD0JpqwKFaRTYyMqBRG7hXkPlPZPFJVeAft\ntvYLLJc5Iou4tvQvw3lB6F3g2jpzW4UQMXKklf3c0pZqYKCNYvEt7elnIyS/Aata\nFViP8384q9BMsSeoyj/mDfV4czbAwYgZN5ZRylM+IElGWNZVBydbBQaGJgj3yJD3\n3+2X3gSf7HN33p4dPCEeNBKnL0vBdS3GPkDibxHzKv5J3euds09QGtsK4BQ=\n-----END CERTIFICATE-----\n">>, <<"*****">>}]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667474}}]},2,5]}, {drop_request_memory_threshold_mib,undefined}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667472}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {memory_quota,58026}, {nodes_wanted,['ns_1@127.0.0.1']}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667473}}]}, {cookie,alkbqedpsntmtnxa}]}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {replication_topology,star}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {server_groups, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667474}}]}, [{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {vbucket_map_history,[{'_vclock',[{'ns_1@127.0.0.1',{1,63575667474}}]}]}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{request_limit,capi},undefined}, {{request_limit,rest},undefined}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{7,63575667472}}]}|{2,3,0}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock', [{'ns_1@127.0.0.1',{3,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, 
{dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{3,63575667472}}]}, {moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {{node,'ns_1@127.0.0.1',ssl_rest_port},18091}]] [ns_server:info,2014-08-19T16:49:02.568,ns_1@127.0.0.1:ns_config<0.17155.0>:ns_config:load_config:827]Here's full dynamic config we loaded + static & default config: [{{node,'ns_1@127.0.0.1',ssl_rest_port},18091}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{3,63575667472}}]}, {moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", 
{"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock', [{'ns_1@127.0.0.1',{3,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{7,63575667472}}]}|{2,3,0}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{request_limit,rest},undefined}, {{request_limit,capi},undefined}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {vbucket_map_history,[{'_vclock',[{'ns_1@127.0.0.1',{1,63575667474}}]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {server_groups, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667474}}]}, [{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]]}, {rest_creds,[{creds,[]}]}, {rest,[{port,8091}]}, {replication_topology,star}, {replication,[{enabled,true}]}, {remote_clusters,[]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667473}}]}, {cookie,alkbqedpsntmtnxa}]}, {nodes_wanted,['ns_1@127.0.0.1']}, {memory_quota,58026}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667472}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {drop_request_memory_threshold_mib,undefined}, {cluster_compat_version, 
[{'_vclock',[{'ns_1@127.0.0.1',{1,63575667474}}]},2,5]}, {cert_and_pkey, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667478}}]}| {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQPzPIoEQwCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmwlh6UM1HlSt78Xr7YCe\n18VU0sN62xbybSOxadjU2gF03Q2jgd+n84Tr9iGKtuy7DUKk/eJJQDQWcCDGTxYg\n8QNmzAlnX/eufV4rhr/9nlksMKdIlXWDvOdLX4yO1FIZ/QvGtoFWBwEc832n3sfa\n1f+EzMV8X6nZxMPV/Stc0StxJPY2Akqi99je3QsYDNvapLjSSawb2oEl8ssA4mmR\ne2P+F4r1j3FAsOsO0VOuKtmsul6utqBCmO34s0vYc6X58RbQVYx8iu5XiTFu5rTi\nFbuHeJ+rjVi4gMxuD4yVIkTJq4KED+p1SkD9H4YvUWy5O7XlmPsA30fmdMpKsZWi\n6QIDAQABowIwADALBgkqhkiG9w0BAQUDggEBADSaYJBLzwuTm8X5KVmfNhrblZTL\n3Lc/PewFJZvp3UuiF6xJQdQMO9mvLZ6MaY/Z4NL/sLionbrmQuGxxChpTwyLNL7a\n666VquUle7zrVYOJKlv/2hgFjk1rhfD0JpqwKFaRTYyMqBRG7hXkPlPZPFJVeAft\ntvYLLJc5Iou4tvQvw3lB6F3g2jpzW4UQMXKklf3c0pZqYKCNYvEt7elnIyS/Aata\nFViP8384q9BMsSeoyj/mDfV4czbAwYgZN5ZRylM+IElGWNZVBydbBQaGJgj3yJD3\n3+2X3gSf7HN33p4dPCEeNBKnL0vBdS3GPkDibxHzKv5J3euds09QGtsK4BQ=\n-----END CERTIFICATE-----\n">>, <<"*****">>}]}, {buckets,[{configs,[]}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667472}}]}, {enabled,false}, {timeout,120}, {max_nodes,1}, {count,0}]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{5,63575667474}}]},2,5]}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575671723}}]}| <<"7470311bdaa2a4acd47d21222af5c9ae">>]}] [error_logger:info,2014-08-19T16:49:02.571,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.17155.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/opt/couchbase/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.572,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.17157.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.572,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.17158.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.572,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.17160.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.572,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.17152.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, 
{restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2014-08-19T16:49:02.572,ns_1@127.0.0.1:ns_server_sup<0.17163.0>:dir_size:start_link:47]Starting quick version of dir_size with program name: i386-linux-godu [error_logger:info,2014-08-19T16:49:02.572,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.17162.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.572,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17164.0>}, {name,diag_handler_worker}, {mfa,{work_queue,start_link,[diag_handler_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.573,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17165.0>}, {name,dir_size}, {mfa,{dir_size,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.573,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17166.0>}, {name,request_throttler}, {mfa,{request_throttler,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.573,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17167.0>}, {name,ns_log}, {mfa,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.573,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17168.0>}, {name,ns_crash_log_consumer}, {mfa,{ns_log,start_link_crash_consumer,[]}}, {restart_type,{permanent,4}}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.573,ns_1@127.0.0.1:ns_config_isasl_sync<0.17171.0>:ns_config_isasl_sync:init:63]isasl_sync init: ["/opt/couchbase/var/lib/couchbase/isasl.pw","_admin", "f6126ae5fac44bf3d8316165791747f2"] [error_logger:info,2014-08-19T16:49:02.574,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17169.0>}, {name,ns_config_ets_dup}, {mfa,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.574,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [ns_server:debug,2014-08-19T16:49:02.574,ns_1@127.0.0.1:ns_config_isasl_sync<0.17171.0>:ns_config_isasl_sync:init:71]isasl_sync init buckets: [] 
[ns_server:debug,2014-08-19T16:49:02.574,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T16:49:02.574,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:49:02.574,ns_1@127.0.0.1:ns_config_isasl_sync<0.17171.0>:ns_config_isasl_sync:writeSASLConf:143]Writing isasl passwd file: "/opt/couchbase/var/lib/couchbase/isasl.pw" [ns_server:debug,2014-08-19T16:49:02.574,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[]}] [ns_server:debug,2014-08-19T16:49:02.574,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: cert_and_pkey -> {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQPzPIoEQwCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmwlh6UM1HlSt78Xr7YCe\n18VU0sN62xbybSOxadjU2gF03Q2jgd+n84Tr9iGKtuy7DUKk/eJJQDQWcCDGTxYg\n8QNmzAlnX/eufV4rhr/9nlksMKdIlXWDvOdLX4yO1FIZ/QvGtoFWBwEc832n3sfa\n1f+EzMV8X6nZxMPV/Stc0StxJPY2Akqi99je3Qs"...>>, <<"*****">>} [ns_server:debug,2014-08-19T16:49:02.574,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> [2,5] [ns_server:debug,2014-08-19T16:49:02.574,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [ns_server:debug,2014-08-19T16:49:02.574,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [ns_server:debug,2014-08-19T16:49:02.575,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [ns_server:debug,2014-08-19T16:49:02.575,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:debug,2014-08-19T16:49:02.575,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T16:49:02.575,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [ns_server:debug,2014-08-19T16:49:02.575,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: memory_quota -> 58026 [ns_server:debug,2014-08-19T16:49:02.575,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [ns_server:debug,2014-08-19T16:49:02.575,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,alkbqedpsntmtnxa}] [ns_server:debug,2014-08-19T16:49:02.575,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] 
[ns_server:debug,2014-08-19T16:49:02.575,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [ns_server:debug,2014-08-19T16:49:02.575,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: replication_topology -> star [ns_server:debug,2014-08-19T16:49:02.575,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [ns_server:info,2014-08-19T16:49:02.575,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [ns_server:debug,2014-08-19T16:49:02.576,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]] [ns_server:debug,2014-08-19T16:49:02.576,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [ns_server:debug,2014-08-19T16:49:02.576,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: uuid -> <<"7470311bdaa2a4acd47d21222af5c9ae">> [ns_server:debug,2014-08-19T16:49:02.576,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [] [ns_server:debug,2014-08-19T16:49:02.576,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T16:49:02.578,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2014-08-19T16:49:02.578,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T16:49:02.578,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined [ns_server:debug,2014-08-19T16:49:02.578,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:49:02.578,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:49:02.578,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T16:49:02.578,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:49:02.578,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',membership} -> active [ns_server:debug,2014-08-19T16:49:02.579,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, 
{log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:49:02.579,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:49:02.579,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [error_logger:info,2014-08-19T16:49:02.580,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17171.0>}, {name,ns_config_isasl_sync}, {mfa,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.580,ns_1@127.0.0.1:ns_node_disco<0.17177.0>:ns_node_disco:init:103]Initting ns_node_disco with [] [error_logger:info,2014-08-19T16:49:02.580,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17174.0>}, {name,ns_log_events}, {mfa,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.580,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:49:02.580,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:49:02.580,ns_1@127.0.0.1:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync 
[error_logger:info,2014-08-19T16:49:02.580,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17176.0>}, {name,ns_node_disco_events}, {mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.580,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:49:02.580,ns_1@127.0.0.1:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:02.580,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:49:02.580,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:49:02.580,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T16:49:02.616,ns_1@127.0.0.1:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:02.617,ns_1@127.0.0.1:<0.17178.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [ns_server:debug,2014-08-19T16:49:02.617,ns_1@127.0.0.1:<0.17178.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [error_logger:info,2014-08-19T16:49:02.617,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17177.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.617,ns_1@127.0.0.1:ns_config_rep<0.17183.0>:ns_config_rep:init:66]init pulling [ns_server:debug,2014-08-19T16:49:02.617,ns_1@127.0.0.1:ns_config_rep<0.17183.0>:ns_config_rep:init:68]init pushing [error_logger:info,2014-08-19T16:49:02.617,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17180.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.617,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17181.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.617,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: 
{local,ns_node_disco_sup} started: [{pid,<0.17182.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.619,ns_1@127.0.0.1:ns_config_rep<0.17183.0>:ns_config_rep:init:72]init reannouncing [ns_server:debug,2014-08-19T16:49:02.619,ns_1@127.0.0.1:ns_config_events<0.17153.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [ns_server:debug,2014-08-19T16:49:02.619,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [ns_server:debug,2014-08-19T16:49:02.619,ns_1@127.0.0.1:ns_config_events<0.17153.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [error_logger:info,2014-08-19T16:49:02.619,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17183.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.619,ns_1@127.0.0.1:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:49:02.619,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T16:49:02.619,ns_1@127.0.0.1:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [error_logger:info,2014-08-19T16:49:02.619,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17175.0>}, {name,ns_node_disco_sup}, {mfa,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:49:02.619,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:49:02.619,ns_1@127.0.0.1:ns_config_rep<0.17183.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([alert_limits,auto_failover_cfg,autocompaction, buckets,cert_and_pkey,cluster_compat_version, drop_request_memory_threshold_mib, dynamic_config_version,email_alerts, fast_warmup,index_aware_rebalance_disabled, max_bucket_count,memory_quota,nodes_wanted,otp, remote_clusters,replication, replication_topology,rest,rest_creds, server_groups,set_view_update_daemon,uuid, vbucket_map_history, {couchdb,max_parallel_indexers}, {couchdb,max_parallel_replica_indexers}, {request_limit,capi}, {request_limit,rest}, {node,'ns_1@127.0.0.1',capi_port}, {node,'ns_1@127.0.0.1',compaction_daemon}, {node,'ns_1@127.0.0.1',config_version}, {node,'ns_1@127.0.0.1',isasl}, {node,'ns_1@127.0.0.1',membership}, {node,'ns_1@127.0.0.1',memcached}, {node,'ns_1@127.0.0.1',moxi}, {node,'ns_1@127.0.0.1',ns_log}, {node,'ns_1@127.0.0.1',port_servers}, {node,'ns_1@127.0.0.1',rest}, {node,'ns_1@127.0.0.1',ssl_capi_port}, {node,'ns_1@127.0.0.1', ssl_proxy_downstream_port}, 
{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port}, {node,'ns_1@127.0.0.1',ssl_rest_port}]..) [ns_server:debug,2014-08-19T16:49:02.619,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[]}] [error_logger:info,2014-08-19T16:49:02.619,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17186.0>}, {name,vbucket_map_mirror}, {mfa,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.619,ns_1@127.0.0.1:ns_log_events<0.17174.0>:ns_mail_log:init:44]ns_mail_log started up [error_logger:info,2014-08-19T16:49:02.620,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17190.0>}, {name,bucket_info_cache}, {mfa,{bucket_info_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.620,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: cert_and_pkey -> {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQPzPIoEQwCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmwlh6UM1HlSt78Xr7YCe\n18VU0sN62xbybSOxadjU2gF03Q2jgd+n84Tr9iGKtuy7DUKk/eJJQDQWcCDGTxYg\n8QNmzAlnX/eufV4rhr/9nlksMKdIlXWDvOdLX4yO1FIZ/QvGtoFWBwEc832n3sfa\n1f+EzMV8X6nZxMPV/Stc0StxJPY2Akqi99je3Qs"...>>, <<"*****">>} [ns_server:debug,2014-08-19T16:49:02.620,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> [2,5] [ns_server:debug,2014-08-19T16:49:02.620,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.17201.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [ns_server:debug,2014-08-19T16:49:02.620,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [error_logger:info,2014-08-19T16:49:02.620,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17193.0>}, {name,ns_tick_event}, {mfa,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.620,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [error_logger:info,2014-08-19T16:49:02.620,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17194.0>}, {name,buckets_events}, {mfa,{gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:02.620,ns_1@127.0.0.1:remote_clusters_info<0.17206.0>:remote_clusters_info:read_or_create_table:540]Reading remote_clusters_info content from /opt/couchbase/var/lib/couchbase/remote_clusters_cache_v3 
[ns_server:debug,2014-08-19T16:49:02.620,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.17201.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}, {proc_lib,init_p_do_apply,3}]} [ns_server:debug,2014-08-19T16:49:02.620,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [error_logger:info,2014-08-19T16:49:02.620,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.17196.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.620,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [error_logger:info,2014-08-19T16:49:02.620,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17195.0>}, {name,ns_mail_sup}, {mfa,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:49:02.620,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T16:49:02.620,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [error_logger:info,2014-08-19T16:49:02.620,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17197.0>}, {name,ns_stats_event}, {mfa,{gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.620,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: memory_quota -> 58026 [ns_server:debug,2014-08-19T16:49:02.621,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [error_logger:info,2014-08-19T16:49:02.621,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17198.0>}, {name,samples_loader_tasks}, {mfa,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.621,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,alkbqedpsntmtnxa}] 
[error_logger:info,2014-08-19T16:49:02.621,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17199.0>}, {name,ns_heart}, {mfa,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.621,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] [ns_server:debug,2014-08-19T16:49:02.621,ns_1@127.0.0.1:ns_server_sup<0.17163.0>:mb_master:check_master_takeover_needed:141]Sending master node question to the following nodes: [] [ns_server:debug,2014-08-19T16:49:02.621,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [ns_server:debug,2014-08-19T16:49:02.621,ns_1@127.0.0.1:ns_server_sup<0.17163.0>:mb_master:check_master_takeover_needed:143]Got replies: [] [error_logger:info,2014-08-19T16:49:02.621,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17202.0>}, {name,ns_doctor}, {mfa,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.621,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: replication_topology -> star [ns_server:debug,2014-08-19T16:49:02.621,ns_1@127.0.0.1:ns_server_sup<0.17163.0>:mb_master:check_master_takeover_needed:149]Was unable to discover master, not going to force mastership takeover [ns_server:debug,2014-08-19T16:49:02.621,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [ns_server:info,2014-08-19T16:49:02.621,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [error_logger:info,2014-08-19T16:49:02.621,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17206.0>}, {name,remote_clusters_info}, {mfa,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [user:info,2014-08-19T16:49:02.621,ns_1@127.0.0.1:mb_master<0.17213.0>:mb_master:init:86]I'm the only node, so I'm the master. 
[ns_server:debug,2014-08-19T16:49:02.621,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]] [ns_server:debug,2014-08-19T16:49:02.621,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [ns_server:debug,2014-08-19T16:49:02.622,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: uuid -> <<"7470311bdaa2a4acd47d21222af5c9ae">> [error_logger:info,2014-08-19T16:49:02.621,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17211.0>}, {name,master_activity_events}, {mfa, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.622,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [] [ns_server:debug,2014-08-19T16:49:02.622,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T16:49:02.622,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2014-08-19T16:49:02.622,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.17201.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [ns_server:debug,2014-08-19T16:49:02.622,ns_1@127.0.0.1:ns_config_rep<0.17183.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([dynamic_config_version]..) 
[ns_server:debug,2014-08-19T16:49:02.622,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T16:49:02.622,ns_1@127.0.0.1:mb_master_sup<0.17215.0>:misc:start_singleton:986]start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.17216.0> on 'ns_1@127.0.0.1' [ns_server:debug,2014-08-19T16:49:02.622,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined [error_logger:info,2014-08-19T16:49:02.622,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.17216.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.622,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.17201.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}]} [ns_server:debug,2014-08-19T16:49:02.622,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:49:02.622,ns_1@127.0.0.1:mb_master_sup<0.17215.0>:misc:start_singleton:986]start_singleton(gen_server, ns_tick, [], []): started as <0.17221.0> on 'ns_1@127.0.0.1' [ns_server:debug,2014-08-19T16:49:02.622,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:49:02.623,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,3,0} [error_logger:info,2014-08-19T16:49:02.622,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.17221.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.623,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:49:02.623,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',membership} -> active [ns_server:debug,2014-08-19T16:49:02.623,ns_1@127.0.0.1:<0.17224.0>:auto_failover:init:134]init auto_failover. 
[ns_server:debug,2014-08-19T16:49:02.623,ns_1@127.0.0.1:mb_master_sup<0.17215.0>:misc:start_singleton:986]start_singleton(gen_server, auto_failover, [], []): started as <0.17224.0> on 'ns_1@127.0.0.1' [ns_server:debug,2014-08-19T16:49:02.623,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [error_logger:info,2014-08-19T16:49:02.623,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.17224.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.623,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:49:02.623,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [error_logger:info,2014-08-19T16:49:02.623,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17213.0>}, {name,mb_master}, {mfa,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:02.623,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17225.0>}, {name,master_activity_events_ingress}, {mfa, {gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.624,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17226.0>}, {name,master_activity_events_timestamper}, {mfa, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.624,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", 
{"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [error_logger:info,2014-08-19T16:49:02.624,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17227.0>}, {name,master_activity_events_pids_watcher}, {mfa, {master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.624,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:49:02.624,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:49:02.624,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:49:02.624,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:49:02.624,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T16:49:02.624,ns_1@127.0.0.1:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [error_logger:info,2014-08-19T16:49:02.637,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17228.0>}, {name,master_activity_events_keeper}, {mfa,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.665,ns_1@127.0.0.1:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:02.665,ns_1@127.0.0.1:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:49:02.665,ns_1@127.0.0.1:<0.17188.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa 
[ns_server:debug,2014-08-19T16:49:02.665,ns_1@127.0.0.1:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:02.665,ns_1@127.0.0.1:<0.17188.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [error_logger:info,2014-08-19T16:49:02.693,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.17232.0>}, {name,ns_ssl_services_setup}, {mfargs,{ns_ssl_services_setup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.695,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.17234.0>}, {name,ns_rest_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_rest_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.697,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.17251.0>}, {name,ns_capi_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_capi_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.697,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17231.0>}, {name,ns_ssl_services_sup}, {mfargs,{ns_ssl_services_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:02.697,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17268.0>}, {name,menelaus_ui_auth}, {mfargs,{menelaus_ui_auth,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.697,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17269.0>}, {name,menelaus_web_cache}, {mfargs,{menelaus_web_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.697,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17270.0>}, {name,menelaus_stats_gatherer}, {mfargs,{menelaus_stats_gatherer,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.698,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17271.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] 
[error_logger:info,2014-08-19T16:49:02.698,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17288.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.698,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17289.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [user:info,2014-08-19T16:49:02.698,ns_1@127.0.0.1:ns_server_sup<0.17163.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. [error_logger:info,2014-08-19T16:49:02.698,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17290.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.698,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17230.0>}, {name,menelaus}, {mfa,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2014-08-19T16:49:02.698,ns_1@127.0.0.1:<0.17294.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213 [error_logger:info,2014-08-19T16:49:02.698,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.17292.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.699,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.17293.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:02.699,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.17294.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:02.699,ns_1@127.0.0.1:<0.17298.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/opt/couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms [error_logger:info,2014-08-19T16:49:02.699,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17291.0>}, {name,mc_sup}, {mfa,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] 
[error_logger:info,2014-08-19T16:49:02.699,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17295.0>}, {name,ns_ports_setup}, {mfa,{ns_ports_setup,start,[]}}, {restart_type,{permanent,4}}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.700,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17296.0>}, {name,ns_port_memcached_killer}, {mfa,{ns_ports_setup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.700,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17298.0>}, {name,ns_memcached_log_rotator}, {mfa,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.700,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17300.0>}, {name,memcached_clients_pool}, {mfa,{memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.700,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17301.0>}, {name,proxied_memcached_clients_pool}, {mfa,{proxied_memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.701,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17302.0>}, {name,xdc_lhttpc_pool}, {mfa, {lhttpc_manager,start_link, [[{name,xdc_lhttpc_pool}, {connection_timeout,120000}, {pool_size,200}]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.701,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17303.0>}, {name,ns_null_connection_pool}, {mfa, {ns_null_connection_pool,start_link, [ns_null_connection_pool]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.701,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17304.0>}, {name,xdc_replication_sup}, {mfa,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:02.701,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17305.0>}, {name,xdc_rep_manager}, {mfa,{xdc_rep_manager,start_link,[]}}, 
{restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.702,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17307.0>}, {name,ns_memcached_sockets_pool}, {mfa,{ns_memcached_sockets_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.702,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.17310.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.702,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.17312.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.702,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.17311.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:02.702,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17308.0>}, {name,ns_bucket_worker_sup}, {mfa,{ns_bucket_worker_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:02.703,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17313.0>}, {name,system_stats_collector}, {mfa,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.703,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17316.0>}, {name,{stats_archiver,"@system"}}, {mfa,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.703,ns_1@127.0.0.1:compaction_daemon<0.17319.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. 
[ns_server:debug,2014-08-19T16:49:02.703,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.17321.0>:xdc_rdoc_replication_srv:init:76]Loaded the following docs: [] [error_logger:info,2014-08-19T16:49:02.703,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17318.0>}, {name,{stats_reader,"@system"}}, {mfa,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.703,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.17321.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:02.703,ns_1@127.0.0.1:compaction_daemon<0.17319.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:info,2014-08-19T16:49:02.704,ns_1@127.0.0.1:set_view_update_daemon<0.17323.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000 [error_logger:info,2014-08-19T16:49:02.704,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17319.0>}, {name,compaction_daemon}, {mfa,{compaction_daemon,start_link,[]}}, {restart_type,{permanent,4}}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.704,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17321.0>}, {name,xdc_rdoc_replication_srv}, {mfa,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.704,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17323.0>}, {name,set_view_update_daemon}, {mfa,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.704,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.17163.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [user:info,2014-08-19T16:49:02.705,ns_1@127.0.0.1:<0.17168.0>:ns_log:crash_consumption_loop:64]Port server moxi on node 'babysitter_of_ns_1@127.0.0.1' exited with status 0. Restarting. 
Messages: WARNING: curl error: transfer closed with outstanding read data remaining from: http://127.0.0.1:8091/pools/default/saslBucketsStreaming WARNING: curl error: couldn't connect to host from: http://127.0.0.1:8091/pools/default/saslBucketsStreaming ERROR: could not contact REST server(s): http://127.0.0.1:8091/pools/default/saslBucketsStreaming WARNING: curl error: couldn't connect to host from: http://127.0.0.1:8091/pools/default/saslBucketsStreaming ERROR: could not contact REST server(s): http://127.0.0.1:8091/pools/default/saslBucketsStreaming WARNING: curl error: couldn't connect to host from: http://127.0.0.1:8091/pools/default/saslBucketsStreaming EOL on stdin. Exiting [ns_server:debug,2014-08-19T16:49:02.709,ns_1@127.0.0.1:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:02.709,ns_1@127.0.0.1:<0.17189.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [ns_server:debug,2014-08-19T16:49:02.709,ns_1@127.0.0.1:<0.17189.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: alkbqedpsntmtnxa [cluster:info,2014-08-19T16:49:02.713,ns_1@127.0.0.1:ns_cluster<0.17151.0>:ns_cluster:handle_call:171]Changing address to "127.0.0.1" due to client request [cluster:debug,2014-08-19T16:49:02.768,ns_1@127.0.0.1:ns_cluster<0.17151.0>:ns_cluster:handle_call:159]handling engage_cluster([{<<"requestedTargetNodeHostname">>, <<"10.242.238.90">>}, {<<"availableStorage">>, {struct, [{<<"hdd">>, [{struct, [{<<"path">>,<<"/">>}, {<<"sizeKBytes">>,103212320}, {<<"usagePercent">>,3}]}, {struct, [{<<"path">>,<<"/dev/shm">>}, {<<"sizeKBytes">>,49515824}, {<<"usagePercent">>,0}]}, {struct, [{<<"path">>,<<"/boot">>}, {<<"sizeKBytes">>,198337}, {<<"usagePercent">>,17}]}, {struct, [{<<"path">>,<<"/data">>}, {<<"sizeKBytes">>,329573012}, {<<"usagePercent">>,1}]}, {struct, [{<<"path">>,<<"/test">>}, {<<"sizeKBytes">>,528447160}, {<<"usagePercent">>,1}]}, {struct, [{<<"path">>,<<"/var/lib/pgsql">>}, {<<"sizeKBytes">>,1922866992}, {<<"usagePercent">>,1}]}]}]}}, {<<"memoryQuota">>,90112}, {<<"storageTotals">>, {struct, [{<<"ram">>, {struct, [{<<"total">>,101408407552}, {<<"quotaTotal">>,94489280512}, {<<"quotaUsed">>,13369344000}, {<<"used">>,13174808576}, {<<"usedByData">>,31847576}]}}, {<<"hdd">>, {struct, [{<<"total">>,1969015799808}, {<<"quotaTotal">>,1969015799808}, {<<"used">>,19690157998}, {<<"usedByData">>,2736915}, {<<"free">>,1949325641810}]}}]}}, {<<"storage">>, {struct, [{<<"ssd">>,[]}, {<<"hdd">>, [{struct, [{<<"path">>,<<"/var/lib/pgsql">>}, {<<"index_path">>,<<"/var/lib/pgsql">>}, {<<"quotaMb">>,<<"none">>}, {<<"state">>,<<"ok">>}]}]}]}}, {<<"systemStats">>, {struct, [{<<"cpu_utilization_rate">>,0.6265664160401002}, {<<"swap_total">>,0}, {<<"swap_used">>,0}, {<<"mem_total">>,101408407552}, {<<"mem_free">>,89866596352}]}}, {<<"interestingStats">>, {struct, [{<<"cmd_get">>,0.0}, {<<"couch_docs_actual_disk_size">>,2736915}, {<<"couch_docs_data_size">>,2729956}, {<<"couch_views_actual_disk_size">>,0}, {<<"couch_views_data_size">>,0}, {<<"curr_items">>,0}, {<<"curr_items_tot">>,0}, {<<"ep_bg_fetched">>,0.0}, {<<"get_hits">>,0.0}, {<<"mem_used">>,31847576}, {<<"ops">>,0.0}, {<<"vb_replica_curr_items">>,0}]}}, {<<"uptime">>,<<"4088">>}, {<<"memoryTotal">>,101408407552}, {<<"memoryFree">>,89866596352}, 
{<<"mcdMemoryReserved">>,77368}, {<<"mcdMemoryAllocated">>,77368}, {<<"couchApiBase">>,<<"http://10.242.238.88:8092/">>}, {<<"otpCookie">>,<<"xyzevwdfypcplvpp">>}, {<<"clusterMembership">>,<<"active">>}, {<<"status">>,<<"healthy">>}, {<<"otpNode">>,<<"ns_1@10.242.238.88">>}, {<<"thisNode">>,true}, {<<"hostname">>,<<"10.242.238.88:8091">>}, {<<"clusterCompatibility">>,131077}, {<<"version">>,<<"2.5.1-1083-rel-enterprise">>}, {<<"os">>,<<"x86_64-unknown-linux-gnu">>}, {<<"ports">>, {struct, [{<<"httpsMgmt">>,18091}, {<<"httpsCAPI">>,18092}, {<<"sslProxy">>,11214}, {<<"proxy">>,11211}, {<<"direct">>,11210}]}}]) [cluster:info,2014-08-19T16:49:02.770,ns_1@127.0.0.1:ns_cluster<0.17151.0>:ns_cluster:do_change_address:398]Decided to change address to "10.242.238.90" [ns_server:debug,2014-08-19T16:49:02.770,ns_1@127.0.0.1:<0.17297.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17295.0>} exited with reason noconnection [user:warn,2014-08-19T16:49:02.770,nonode@nohost:ns_node_disco<0.17177.0>:ns_node_disco:handle_info:165]Node nonode@nohost saw that node 'ns_1@127.0.0.1' went down. Details: [{nodedown_reason, net_kernel_terminated}] [error_logger:info,2014-08-19T16:49:02.770,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17331.0>}, {name,ns_ports_setup}, {mfa,{ns_ports_setup,start,[]}}, {restart_type,{permanent,4}}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:02.771,nonode@nohost:dist_manager<0.267.0>:dist_manager:do_adjust_address:249]Adjusted IP to "10.242.238.90" [ns_server:info,2014-08-19T16:49:02.771,nonode@nohost:dist_manager<0.267.0>:dist_manager:bringup:230]Attempting to bring up net_kernel with name 'ns_1@10.242.238.90' [error_logger:info,2014-08-19T16:49:02.771,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.17334.0>}, {name,erl_epmd}, {mfargs,{erl_epmd,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.771,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.17335.0>}, {name,auth}, {mfargs,{auth,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [user:info,2014-08-19T16:49:02.772,ns_1@10.242.238.90:ns_node_disco<0.17177.0>:ns_node_disco:handle_info:159]Node 'ns_1@10.242.238.90' saw that node 'ns_1@10.242.238.90' came up. Tags: [] [ns_server:debug,2014-08-19T16:49:02.772,ns_1@10.242.238.90:<0.17322.0>:xdc_rdoc_replication_srv:nodeup_monitoring_loop:46]got nodeup event. 
Considering rdocs replication [error_logger:info,2014-08-19T16:49:02.772,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.17336.0>}, {name,net_kernel}, {mfargs, {net_kernel,start_link, [['ns_1@10.242.238.90',longnames]]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:02.772,ns_1@10.242.238.90:dist_manager<0.267.0>:dist_manager:save_node:143]saving node to "/opt/couchbase/var/lib/couchbase/couchbase-server.node" [ns_server:debug,2014-08-19T16:49:02.772,ns_1@10.242.238.90:xdc_rdoc_replication_srv<0.17321.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [error_logger:info,2014-08-19T16:49:02.772,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_sup} started: [{pid,<0.17333.0>}, {name,net_sup_dynamic}, {mfargs, {erl_distribution,start_link, [['ns_1@10.242.238.90',longnames]]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [ns_server:warn,2014-08-19T16:49:02.773,ns_1@10.242.238.90:xdc_rdoc_replication_srv<0.17321.0>:xdc_rdoc_replication_srv:handle_info:150]Remote server node {xdc_rdoc_replication_srv,'ns_1@127.0.0.1'} process down: noconnection [ns_server:debug,2014-08-19T16:49:02.801,ns_1@10.242.238.90:dist_manager<0.267.0>:dist_manager:bringup:238]Attempted to save node name to disk: ok [ns_server:info,2014-08-19T16:49:02.801,ns_1@10.242.238.90:dist_manager<0.267.0>:dist_manager:do_adjust_address:253]Re-setting cookie {alkbqedpsntmtnxa,'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:49:02.801,ns_1@10.242.238.90:dist_manager<0.267.0>:dist_manager:save_address_config:138]Deleting irrelevant ip file "/opt/couchbase/var/lib/couchbase/ip": ok [ns_server:info,2014-08-19T16:49:02.802,ns_1@10.242.238.90:dist_manager<0.267.0>:dist_manager:save_address_config:139]saving ip config to "/opt/couchbase/var/lib/couchbase/ip_start" [ns_server:info,2014-08-19T16:49:02.835,ns_1@10.242.238.90:dist_manager<0.267.0>:dist_manager:do_adjust_address:260]Persisted the address successfully [cluster:debug,2014-08-19T16:49:02.835,ns_1@10.242.238.90:<0.17329.0>:ns_cluster:maybe_rename:431]Renaming node from 'ns_1@127.0.0.1' to 'ns_1@10.242.238.90'. 
[cluster:debug,2014-08-19T16:49:02.835,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf nodes_wanted -> nodes_wanted: ['ns_1@127.0.0.1'] -> ['ns_1@10.242.238.90'] [cluster:debug,2014-08-19T16:49:02.835,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf server_groups -> server_groups: [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]] -> [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@10.242.238.90']}]] [cluster:debug,2014-08-19T16:49:02.835,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',capi_port} -> {node, 'ns_1@10.242.238.90', capi_port}: 8092 -> 8092 [cluster:debug,2014-08-19T16:49:02.835,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',compaction_daemon} -> {node, 'ns_1@10.242.238.90', compaction_daemon}: [{check_interval,30},{min_file_size,131072}] -> [{check_interval,30},{min_file_size,131072}] [cluster:debug,2014-08-19T16:49:02.838,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',config_version} -> {node, 'ns_1@10.242.238.90', config_version}: {2,3,0} -> {2,3,0} [cluster:debug,2014-08-19T16:49:02.838,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',isasl} -> {node, 'ns_1@10.242.238.90', isasl}: [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [cluster:debug,2014-08-19T16:49:02.838,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',membership} -> {node, 'ns_1@10.242.238.90', membership}: active -> active [cluster:debug,2014-08-19T16:49:02.838,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',memcached} -> {node, 'ns_1@10.242.238.90', memcached}: [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [cluster:debug,2014-08-19T16:49:02.839,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',moxi} -> {node, 
'ns_1@10.242.238.90',moxi}: [{port,11211},{verbosity,[]}] -> [{port,11211},{verbosity,[]}] [cluster:debug,2014-08-19T16:49:02.839,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',ns_log} -> {node, 'ns_1@10.242.238.90', ns_log}: [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [cluster:debug,2014-08-19T16:49:02.839,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',port_servers} -> {node, 'ns_1@10.242.238.90', port_servers}: [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [cluster:debug,2014-08-19T16:49:02.840,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf 
{node,'ns_1@127.0.0.1',rest} -> {node, 'ns_1@10.242.238.90',rest}: [{port,8091},{port_meta,global}] -> [{port,8091},{port_meta,global}] [cluster:debug,2014-08-19T16:49:02.840,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',ssl_capi_port} -> {node, 'ns_1@10.242.238.90', ssl_capi_port}: 18092 -> 18092 [cluster:debug,2014-08-19T16:49:02.840,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> {node, 'ns_1@10.242.238.90', ssl_proxy_downstream_port}: 11214 -> 11214 [cluster:debug,2014-08-19T16:49:02.841,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> {node, 'ns_1@10.242.238.90', ssl_proxy_upstream_port}: 11215 -> 11215 [cluster:debug,2014-08-19T16:49:02.841,ns_1@10.242.238.90:ns_config<0.17155.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',ssl_rest_port} -> {node, 'ns_1@10.242.238.90', ssl_rest_port}: 18091 -> 18091 [ns_server:debug,2014-08-19T16:49:02.841,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T16:49:02.841,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:49:02.841,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:49:02.841,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:49:02.841,ns_1@10.242.238.90:ns_config_rep<0.17183.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([nodes_wanted,server_groups, {node,'ns_1@10.242.238.90',capi_port}, {node,'ns_1@10.242.238.90',compaction_daemon}, {node,'ns_1@10.242.238.90',config_version}, {node,'ns_1@10.242.238.90',isasl}, {node,'ns_1@10.242.238.90',membership}, {node,'ns_1@10.242.238.90',memcached}, {node,'ns_1@10.242.238.90',moxi}, {node,'ns_1@10.242.238.90',ns_log}, {node,'ns_1@10.242.238.90',port_servers}, {node,'ns_1@10.242.238.90',rest}, {node,'ns_1@10.242.238.90',ssl_capi_port}, {node,'ns_1@10.242.238.90', ssl_proxy_downstream_port}, {node,'ns_1@10.242.238.90', ssl_proxy_upstream_port}, {node,'ns_1@10.242.238.90',ssl_rest_port}]..) 
[ns_server:debug,2014-08-19T16:49:02.841,ns_1@10.242.238.90:ns_config_events<0.17153.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [ns_server:debug,2014-08-19T16:49:02.841,ns_1@10.242.238.90:mb_master<0.17213.0>:mb_master:update_peers:506]List of peers has changed from ['ns_1@127.0.0.1'] to ['ns_1@10.242.238.90'] [ns_server:debug,2014-08-19T16:49:02.841,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:49:02.841,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:49:02.842,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:02.842,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:49:02.844,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T16:49:02.844,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:49:02.844,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, 
{dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:49:02.844,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',membership} -> active [ns_server:debug,2014-08-19T16:49:02.845,ns_1@10.242.238.90:ns_node_disco_events<0.17176.0>:ns_node_disco_rep_events:handle_event:42]Detected a new nodes (['ns_1@10.242.238.90']). Moving config around. [ns_server:debug,2014-08-19T16:49:02.845,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:49:02.845,ns_1@10.242.238.90:<0.17204.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17202.0>} exited with reason shutdown [ns_server:info,2014-08-19T16:49:02.845,ns_1@10.242.238.90:ns_node_disco_events<0.17176.0>:ns_node_disco_log:handle_event:46]ns_node_disco_log: nodes changed: ['ns_1@10.242.238.90'] [ns_server:debug,2014-08-19T16:49:02.845,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',config_version} -> {2,3,0} [ns_server:info,2014-08-19T16:49:02.845,ns_1@10.242.238.90:mb_master<0.17213.0>:mb_master:terminate:299]Synchronously shutting down child mb_master_sup [error_logger:info,2014-08-19T16:49:02.845,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17355.0>}, {name,ns_doctor}, {mfa,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.845,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:49:02.845,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:49:02.845,ns_1@10.242.238.90:<0.17214.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17213.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:49:02.845,ns_1@10.242.238.90:ns_server_sup<0.17163.0>:mb_master:check_master_takeover_needed:141]Sending master node question to the following nodes: [] [ns_server:debug,2014-08-19T16:49:02.845,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@10.242.238.90']}]] [ns_server:debug,2014-08-19T16:49:02.845,ns_1@10.242.238.90:ns_server_sup<0.17163.0>:mb_master:check_master_takeover_needed:143]Got replies: [] [ns_server:debug,2014-08-19T16:49:02.845,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@10.242.238.90'] [ns_server:debug,2014-08-19T16:49:02.845,ns_1@10.242.238.90:ns_server_sup<0.17163.0>:mb_master:check_master_takeover_needed:149]Was unable to discover master, not going to force mastership takeover [user:info,2014-08-19T16:49:02.845,ns_1@10.242.238.90:mb_master<0.17362.0>:mb_master:init:86]I'm the only node, so I'm the master. 
[ns_server:info,2014-08-19T16:49:02.846,ns_1@10.242.238.90:ns_log<0.17167.0>:ns_log:handle_cast:183]suppressing duplicate log mb_master:undefined([<<"I'm the only node, so I'm the master.">>]) because it's been seen 1 times in the past 0.224171 secs (last seen 0.224171 secs ago [ns_server:debug,2014-08-19T16:49:02.846,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [ns_server:debug,2014-08-19T16:49:02.846,ns_1@10.242.238.90:ns_config_rep<0.17183.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([dynamic_config_version]..) [ns_server:debug,2014-08-19T16:49:02.846,ns_1@10.242.238.90:mb_master_sup<0.17364.0>:misc:start_singleton:986]start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.17365.0> on 'ns_1@10.242.238.90' [error_logger:info,2014-08-19T16:49:02.846,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.17365.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.847,ns_1@10.242.238.90:mb_master_sup<0.17364.0>:misc:start_singleton:986]start_singleton(gen_server, ns_tick, [], []): started as <0.17367.0> on 'ns_1@10.242.238.90' [error_logger:info,2014-08-19T16:49:02.847,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.17367.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:02.847,ns_1@10.242.238.90:<0.17368.0>:auto_failover:init:134]init auto_failover. [ns_server:debug,2014-08-19T16:49:02.847,ns_1@10.242.238.90:mb_master_sup<0.17364.0>:misc:start_singleton:986]start_singleton(gen_server, auto_failover, [], []): started as <0.17368.0> on 'ns_1@10.242.238.90' [cluster:info,2014-08-19T16:49:02.847,ns_1@10.242.238.90:ns_cluster<0.17151.0>:ns_cluster:do_change_address:404]Renamed node. New name is 'ns_1@10.242.238.90'. 
[error_logger:info,2014-08-19T16:49:02.847,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.17368.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:02.847,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17362.0>}, {name,mb_master}, {mfa,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:49:02.895,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [user:info,2014-08-19T16:49:02.895,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_init:86]Initial otp cookie generated: nntvfgasfojamdnn [ns_server:debug,2014-08-19T16:49:02.895,ns_1@10.242.238.90:<0.17351.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.90'], with cookie: alkbqedpsntmtnxa [ns_server:debug,2014-08-19T16:49:02.895,ns_1@10.242.238.90:ns_config_events<0.17153.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [cluster:debug,2014-08-19T16:49:02.895,ns_1@10.242.238.90:ns_cluster<0.17151.0>:ns_cluster:handle_call:161]engage_cluster(..) -> {ok,ok} [ns_server:debug,2014-08-19T16:49:02.895,ns_1@10.242.238.90:ns_config_rep<0.17183.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([otp]..) 
[ns_server:debug,2014-08-19T16:49:02.895,ns_1@10.242.238.90:<0.17351.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.90'], with cookie: nntvfgasfojamdnn [ns_server:debug,2014-08-19T16:49:02.895,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,nntvfgasfojamdnn}] [ns_server:debug,2014-08-19T16:49:02.895,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:49:02.896,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [cluster:debug,2014-08-19T16:49:02.906,ns_1@10.242.238.90:ns_cluster<0.17151.0>:ns_cluster:handle_call:165]handling complete_join([{<<"targetNode">>,<<"ns_1@10.242.238.90">>}, {<<"availableStorage">>, {struct, [{<<"hdd">>, [{struct, [{<<"path">>,<<"/">>}, {<<"sizeKBytes">>,103212320}, {<<"usagePercent">>,3}]}, {struct, [{<<"path">>,<<"/dev/shm">>}, {<<"sizeKBytes">>,49515824}, {<<"usagePercent">>,0}]}, {struct, [{<<"path">>,<<"/boot">>}, {<<"sizeKBytes">>,198337}, {<<"usagePercent">>,17}]}, {struct, [{<<"path">>,<<"/data">>}, {<<"sizeKBytes">>,329573012}, {<<"usagePercent">>,1}]}, {struct, [{<<"path">>,<<"/test">>}, {<<"sizeKBytes">>,528447160}, {<<"usagePercent">>,1}]}, {struct, [{<<"path">>,<<"/var/lib/pgsql">>}, {<<"sizeKBytes">>,1922866992}, {<<"usagePercent">>,1}]}]}]}}, {<<"memoryQuota">>,90112}, {<<"storageTotals">>, {struct, [{<<"ram">>, {struct, [{<<"total">>,101408407552}, {<<"quotaTotal">>,94489280512}, {<<"quotaUsed">>,13369344000}, {<<"used">>,13174808576}, {<<"usedByData">>,31847576}]}}, {<<"hdd">>, {struct, [{<<"total">>,1969015799808}, {<<"quotaTotal">>,1969015799808}, {<<"used">>,19690157998}, {<<"usedByData">>,2736915}, {<<"free">>,1949325641810}]}}]}}, {<<"storage">>, {struct, [{<<"ssd">>,[]}, {<<"hdd">>, [{struct, [{<<"path">>,<<"/var/lib/pgsql">>}, {<<"index_path">>,<<"/var/lib/pgsql">>}, {<<"quotaMb">>,<<"none">>}, {<<"state">>,<<"ok">>}]}]}]}}, {<<"systemStats">>, {struct, [{<<"cpu_utilization_rate">>,0.6265664160401002}, {<<"swap_total">>,0}, {<<"swap_used">>,0}, {<<"mem_total">>,101408407552}, {<<"mem_free">>,89866596352}]}}, {<<"interestingStats">>, {struct, [{<<"cmd_get">>,0.0}, {<<"couch_docs_actual_disk_size">>,2736915}, {<<"couch_docs_data_size">>,2729956}, {<<"couch_views_actual_disk_size">>,0}, {<<"couch_views_data_size">>,0}, {<<"curr_items">>,0}, {<<"curr_items_tot">>,0}, {<<"ep_bg_fetched">>,0.0}, {<<"get_hits">>,0.0}, {<<"mem_used">>,31847576}, {<<"ops">>,0.0}, {<<"vb_replica_curr_items">>,0}]}}, {<<"uptime">>,<<"4088">>}, {<<"memoryTotal">>,101408407552}, {<<"memoryFree">>,89866596352}, {<<"mcdMemoryReserved">>,77368}, {<<"mcdMemoryAllocated">>,77368}, {<<"couchApiBase">>,<<"http://10.242.238.88:8092/">>}, {<<"otpCookie">>,<<"xyzevwdfypcplvpp">>}, {<<"clusterMembership">>,<<"active">>}, {<<"status">>,<<"healthy">>}, {<<"otpNode">>,<<"ns_1@10.242.238.88">>}, {<<"thisNode">>,true}, {<<"hostname">>,<<"10.242.238.88:8091">>}, {<<"clusterCompatibility">>,131077}, {<<"version">>,<<"2.5.1-1083-rel-enterprise">>}, {<<"os">>,<<"x86_64-unknown-linux-gnu">>}, {<<"ports">>, {struct, [{<<"httpsMgmt">>,18091}, {<<"httpsCAPI">>,18092}, {<<"sslProxy">>,11214}, {<<"proxy">>,11211}, {<<"direct">>,11210}]}}]) [user:info,2014-08-19T16:49:02.908,ns_1@10.242.238.90:ns_cluster<0.17151.0>:ns_cluster:perform_actual_join:897]Node 'ns_1@10.242.238.90' 
is joining cluster via node 'ns_1@10.242.238.88'. [ns_server:debug,2014-08-19T16:49:02.908,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: i_am_a_dead_man -> true [ns_server:debug,2014-08-19T16:49:02.908,ns_1@10.242.238.90:<0.17324.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17323.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:49:02.908,ns_1@10.242.238.90:ns_config_rep<0.17183.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([i_am_a_dead_man]..) [ns_server:debug,2014-08-19T16:49:02.908,ns_1@10.242.238.90:<0.17320.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17319.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:49:02.945,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:02.945,ns_1@10.242.238.90:<0.17370.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.90'], with cookie: nntvfgasfojamdnn [ns_server:debug,2014-08-19T16:49:02.945,ns_1@10.242.238.90:<0.17370.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.90'], with cookie: nntvfgasfojamdnn [error_logger:error,2014-08-19T16:49:02.953,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_msg:119]** Connection attempt from disallowed node 'ns_1@10.242.238.88' ** [error_logger:error,2014-08-19T16:49:02.971,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_msg:119]** Connection attempt from disallowed node 'ns_1@10.242.238.89' ** [ns_server:debug,2014-08-19T16:49:03.190,ns_1@10.242.238.90:<0.17317.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_stats_event,<0.17316.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:49:03.191,ns_1@10.242.238.90:<0.17315.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_tick_event,<0.17313.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:49:03.191,ns_1@10.242.238.90:<0.17312.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17311.0>} exited with reason shutdown [error_logger:error,2014-08-19T16:49:03.191,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_bucket_sup} Context: shutdown_error Reason: normal Offender: [{pid,<0.17312.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.191,ns_1@10.242.238.90:<0.17332.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17331.0>} exited with reason killed [ns_server:debug,2014-08-19T16:49:03.191,ns_1@10.242.238.90:<0.17299.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17296.0>} exited with reason killed [ns_server:debug,2014-08-19T16:49:03.192,ns_1@10.242.238.90:<0.17233.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17232.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:49:03.192,ns_1@10.242.238.90:<0.17229.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {master_activity_events,<0.17228.0>} 
exited with reason killed [ns_server:info,2014-08-19T16:49:03.192,ns_1@10.242.238.90:mb_master<0.17362.0>:mb_master:terminate:299]Synchronously shutting down child mb_master_sup [ns_server:debug,2014-08-19T16:49:03.192,ns_1@10.242.238.90:<0.17363.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17362.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:49:03.192,ns_1@10.242.238.90:<0.17356.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17355.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:49:03.192,ns_1@10.242.238.90:<0.17200.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {buckets_events,<0.17199.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:49:03.193,ns_1@10.242.238.90:<0.17192.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17190.0>} exited with reason killed [ns_server:debug,2014-08-19T16:49:03.193,ns_1@10.242.238.90:<0.17187.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17186.0>} exited with reason killed [ns_server:debug,2014-08-19T16:49:03.193,ns_1@10.242.238.90:<0.17184.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events_local,<0.17183.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:49:03.193,ns_1@10.242.238.90:<0.17172.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17171.0>} exited with reason shutdown [error_logger:error,2014-08-19T16:49:03.193,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: gen_event:init_it/6 pid: <0.17191.0> registered_name: bucket_info_cache_invalidations exception exit: killed in function gen_event:terminate_server/4 ancestors: [bucket_info_cache,ns_server_sup,ns_server_cluster_sup, <0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 233 stack_size: 24 reductions: 133 neighbours: [ns_server:debug,2014-08-19T16:49:03.193,ns_1@10.242.238.90:<0.17170.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17169.0>} exited with reason killed [cluster:debug,2014-08-19T16:49:03.195,ns_1@10.242.238.90:ns_cluster<0.17151.0>:ns_cluster:perform_actual_join:905]ns_cluster: joining cluster. Child has exited. [cluster:debug,2014-08-19T16:49:03.199,ns_1@10.242.238.90:ns_cluster<0.17151.0>:ns_cluster:perform_actual_join:908]Deleted _replicator db: ok. 
[ns_server:debug,2014-08-19T16:49:03.199,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,xyzevwdfypcplvpp}] [ns_server:debug,2014-08-19T16:49:03.200,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@10.242.238.90','ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:03.200,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> undefined [cluster:debug,2014-08-19T16:49:03.200,ns_1@10.242.238.90:ns_cluster<0.17151.0>:ns_cluster:perform_actual_join:927]pre-join cleaned config is: {config,{}, [[], [{directory,"/opt/couchbase/var/lib/couchbase/config"}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {nodes_wanted,['ns_1@127.0.0.1']}, {{node,'ns_1@127.0.0.1',membership},active}, {rest,[{port,8091}]}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',ssl_rest_port},18091}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {rest_creds,[{creds,[]}]}, {remote_clusters,[]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{<<"2476fca958629f521e96d4f683277efd">>,{1,63575671742}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{<<"2476fca958629f521e96d4f683277efd">>,{1,63575671742}}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,[]}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {memory_quota,58026}, {buckets,[{configs,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/couchbase/bin/memcached", 
["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{<<"2476fca958629f521e96d4f683277efd">>,{1,63575671742}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {replication,[{enabled,true}]}, {auto_failover_cfg, [{enabled,false},{timeout,120},{max_nodes,1},{count,0}]}, {{request_limit,rest},undefined}, {{request_limit,capi},undefined}, {drop_request_memory_threshold_mib,undefined}, {replication_topology,star}]], [[{cluster_compat_version,undefined}, {nodes_wanted,['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {otp, [{'_vclock', [{'ns_1@10.242.238.90',{2,63575671743}}, {'ns_1@127.0.0.1',{1,63575667473}}]}, {cookie,xyzevwdfypcplvpp}]}, {{node,'ns_1@10.242.238.90',capi_port}, [{'_vclock',[{'ns_1@10.242.238.90',{1,63575671742}}]}|8092]}, {{node,'ns_1@10.242.238.90',compaction_daemon}, [{'_vclock',[{'ns_1@10.242.238.90',{1,63575671742}}]}, {check_interval,30}, {min_file_size,131072}]}, {{node,'ns_1@10.242.238.90',config_version}, [{'_vclock', [{'ns_1@10.242.238.90',{1,63575671742}}, {'ns_1@127.0.0.1',{7,63575667472}}]}| {2,3,0}]}, {{node,'ns_1@10.242.238.90',isasl}, [{'_vclock', [{'ns_1@10.242.238.90',{1,63575671742}}, {'ns_1@127.0.0.1',{1,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@10.242.238.90',memcached}, [{'_vclock', [{'ns_1@10.242.238.90',{1,63575671742}}, {'ns_1@127.0.0.1',{3,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"f6126ae5fac44bf3d8316165791747f2"}, {verbosity,[]}]}, {{node,'ns_1@10.242.238.90',moxi}, [{'_vclock',[{'ns_1@10.242.238.90',{1,63575671742}}]}, {port,11211}, {verbosity,[]}]}, {{node,'ns_1@10.242.238.90',ns_log}, [{'_vclock', [{'ns_1@10.242.238.90',{1,63575671742}}, {'ns_1@127.0.0.1',{1,63575667472}}, {<<"c3a87fe2e8c58375a03730a71fdf48a8">>,{1,63575667472}}]}, 
{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {{node,'ns_1@10.242.238.90',port_servers}, [{'_vclock', [{'ns_1@10.242.238.90',{1,63575671742}}, {'ns_1@127.0.0.1',{3,63575667472}}]}, {moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@10.242.238.90',rest}, [{'_vclock',[{'ns_1@10.242.238.90',{1,63575671742}}]}, {port,8091}, {port_meta,global}]}, {{node,'ns_1@10.242.238.90',ssl_capi_port}, [{'_vclock',[{'ns_1@10.242.238.90',{1,63575671742}}]}|18092]}, {{node,'ns_1@10.242.238.90',ssl_proxy_downstream_port}, [{'_vclock',[{'ns_1@10.242.238.90',{1,63575671742}}]}|11214]}, {{node,'ns_1@10.242.238.90',ssl_proxy_upstream_port}, [{'_vclock',[{'ns_1@10.242.238.90',{1,63575671742}}]}|11215]}, {{node,'ns_1@10.242.238.90',ssl_rest_port}, [{'_vclock',[{'ns_1@10.242.238.90',{1,63575671742}}]}|18091]}]], ns_config_default} [ns_server:debug,2014-08-19T16:49:03.202,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [user:info,2014-08-19T16:49:03.202,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_sync:130]Node 'ns_1@10.242.238.90' synchronized otp cookie xyzevwdfypcplvpp from cluster [ns_server:debug,2014-08-19T16:49:03.202,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:03.263,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [cluster:debug,2014-08-19T16:49:03.265,ns_1@10.242.238.90:ns_cluster<0.17151.0>:ns_cluster:perform_actual_join:931]Connection from 'ns_1@10.242.238.90' to 'ns_1@10.242.238.88': true [cluster:debug,2014-08-19T16:49:03.265,ns_1@10.242.238.90:ns_cluster<0.17151.0>:ns_cluster:perform_actual_join:943]Join status: {ok,ok}, starting ns_server_cluster back [ns_server:info,2014-08-19T16:49:03.266,ns_1@10.242.238.90:ns_server_sup<0.17392.0>:dir_size:start_link:47]Starting quick version of dir_size with program name: i386-linux-godu 
[error_logger:info,2014-08-19T16:49:03.266,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17393.0>}, {name,diag_handler_worker}, {mfa,{work_queue,start_link,[diag_handler_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.266,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17394.0>}, {name,dir_size}, {mfa,{dir_size,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.266,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17395.0>}, {name,request_throttler}, {mfa,{request_throttler,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:warn,2014-08-19T16:49:03.267,ns_1@10.242.238.90:ns_log<0.17396.0>:ns_log:read_logs:123]Couldn't load logs from "/opt/couchbase/var/lib/couchbase/ns_log" (perhaps it's first startup): {error, enoent} [ns_doctor:error,2014-08-19T16:49:03.267,ns_1@10.242.238.90:ns_log<0.17396.0>:ns_doctor:get_node:195]Error attempting to get node 'ns_1@10.242.238.88': {exit, {noproc, {gen_server,call, [ns_doctor, {get_node, 'ns_1@10.242.238.88'}]}}} [error_logger:info,2014-08-19T16:49:03.267,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17396.0>}, {name,ns_log}, {mfa,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.267,ns_1@10.242.238.90:ns_config_isasl_sync<0.17399.0>:ns_config_isasl_sync:init:63]isasl_sync init: ["/opt/couchbase/var/lib/couchbase/isasl.pw","_admin", "f6126ae5fac44bf3d8316165791747f2"] [ns_server:debug,2014-08-19T16:49:03.267,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> undefined [ns_server:debug,2014-08-19T16:49:03.267,ns_1@10.242.238.90:ns_config_isasl_sync<0.17399.0>:ns_config_isasl_sync:init:71]isasl_sync init buckets: [] [error_logger:info,2014-08-19T16:49:03.267,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17397.0>}, {name,ns_crash_log_consumer}, {mfa,{ns_log,start_link_crash_consumer,[]}}, {restart_type,{permanent,4}}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.267,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@10.242.238.90','ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:03.268,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,xyzevwdfypcplvpp}] [error_logger:info,2014-08-19T16:49:03.268,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17398.0>}, {name,ns_config_ets_dup}, 
{mfa,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.268,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:49:03.268,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:49:03.268,ns_1@10.242.238.90:ns_config_isasl_sync<0.17399.0>:ns_config_isasl_sync:writeSASLConf:143]Writing isasl passwd file: "/opt/couchbase/var/lib/couchbase/isasl.pw" [ns_server:debug,2014-08-19T16:49:03.268,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T16:49:03.268,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:49:03.268,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.268,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.269,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T16:49:03.269,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", 
{"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:49:03.269,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:49:03.269,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:49:03.269,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:49:03.270,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:49:03.270,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_rest_port} -> 18091 [error_logger:info,2014-08-19T16:49:03.270,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17399.0>}, {name,ns_config_isasl_sync}, {mfa,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.270,ns_1@10.242.238.90:ns_node_disco<0.17408.0>:ns_node_disco:init:103]Initting ns_node_disco with ['ns_1@10.242.238.88'] [error_logger:info,2014-08-19T16:49:03.270,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17405.0>}, {name,ns_log_events}, {mfa,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.270,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [error_logger:info,2014-08-19T16:49:03.270,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17407.0>}, {name,ns_node_disco_events}, {mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.271,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:03.315,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:03.315,ns_1@10.242.238.90:<0.17409.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.88', 'ns_1@10.242.238.90'], with cookie: xyzevwdfypcplvpp 
[ns_server:debug,2014-08-19T16:49:03.315,ns_1@10.242.238.90:<0.17409.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.88','ns_1@10.242.238.90'], with cookie: xyzevwdfypcplvpp [error_logger:info,2014-08-19T16:49:03.316,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17408.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.316,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:init:66]init pulling [error_logger:info,2014-08-19T16:49:03.316,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17411.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:03.316,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.88' [error_logger:info,2014-08-19T16:49:03.316,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17412.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.316,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17413.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.324,ns_1@10.242.238.90:ns_config_events<0.17153.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [ns_server:debug,2014-08-19T16:49:03.324,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:init:68]init pushing [ns_server:debug,2014-08-19T16:49:03.324,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [ns_server:debug,2014-08-19T16:49:03.324,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T16:49:03.324,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:49:03.325,ns_1@10.242.238.90:ns_config_isasl_sync<0.17399.0>:ns_config_isasl_sync:writeSASLConf:143]Writing isasl passwd file: "/opt/couchbase/var/lib/couchbase/isasl.pw" [ns_server:debug,2014-08-19T16:49:03.325,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:49:03.325,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to 
"/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:03.328,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[[{map,[{0,[],['ns_1@10.242.238.88',undefined]}, {1,[],['ns_1@10.242.238.88',undefined]}, {2,[],['ns_1@10.242.238.88',undefined]}, {3,[],['ns_1@10.242.238.88',undefined]}, {4,[],['ns_1@10.242.238.88',undefined]}, {5,[],['ns_1@10.242.238.88',undefined]}, {6,[],['ns_1@10.242.238.88',undefined]}, {7,[],['ns_1@10.242.238.88',undefined]}, {8,[],['ns_1@10.242.238.88',undefined]}, {9,[],['ns_1@10.242.238.88',undefined]}, {10,[],['ns_1@10.242.238.88',undefined]}, {11,[],['ns_1@10.242.238.88',undefined]}, {12,[],['ns_1@10.242.238.88',undefined]}, {13,[],['ns_1@10.242.238.88',undefined]}, {14,[],['ns_1@10.242.238.88',undefined]}, {15,[],['ns_1@10.242.238.88',undefined]}, {16,[],['ns_1@10.242.238.88',undefined]}, {17,[],['ns_1@10.242.238.88',undefined]}, {18,[],['ns_1@10.242.238.88',undefined]}, {19,[],['ns_1@10.242.238.88',undefined]}, {20,[],['ns_1@10.242.238.88',undefined]}, {21,[],['ns_1@10.242.238.88',undefined]}, {22,[],['ns_1@10.242.238.88',undefined]}, {23,[],['ns_1@10.242.238.88',undefined]}, {24,[],['ns_1@10.242.238.88',undefined]}, {25,[],['ns_1@10.242.238.88',undefined]}, {26,[],['ns_1@10.242.238.88',undefined]}, {27,[],['ns_1@10.242.238.88',undefined]}, {28,[],['ns_1@10.242.238.88',undefined]}, {29,[],['ns_1@10.242.238.88',undefined]}, {30,[],['ns_1@10.242.238.88',undefined]}, {31,[],['ns_1@10.242.238.88',undefined]}, {32,[],['ns_1@10.242.238.88',undefined]}, {33,[],['ns_1@10.242.238.88',undefined]}, {34,[],['ns_1@10.242.238.88',undefined]}, {35,[],['ns_1@10.242.238.88',undefined]}, {36,[],['ns_1@10.242.238.88',undefined]}, {37,[],['ns_1@10.242.238.88',undefined]}, {38,[],['ns_1@10.242.238.88',undefined]}, {39,[],['ns_1@10.242.238.88',undefined]}, {40,[],['ns_1@10.242.238.88',undefined]}, {41,[],['ns_1@10.242.238.88',undefined]}, {42,[],['ns_1@10.242.238.88',undefined]}, {43,[],['ns_1@10.242.238.88',undefined]}, {44,[],['ns_1@10.242.238.88',undefined]}, {45,[],['ns_1@10.242.238.88',undefined]}, {46,[],['ns_1@10.242.238.88',undefined]}, {47,[],['ns_1@10.242.238.88',undefined]}, {48,[],['ns_1@10.242.238.88',undefined]}, {49,[],['ns_1@10.242.238.88',undefined]}, {50,[],['ns_1@10.242.238.88',undefined]}, {51,[],['ns_1@10.242.238.88',undefined]}, {52,[],['ns_1@10.242.238.88',undefined]}, {53,[],['ns_1@10.242.238.88',undefined]}, {54,[],['ns_1@10.242.238.88',undefined]}, {55,[],['ns_1@10.242.238.88',undefined]}, {56,[],['ns_1@10.242.238.88',undefined]}, {57,[],['ns_1@10.242.238.88',undefined]}, {58,[],['ns_1@10.242.238.88',undefined]}, {59,[],['ns_1@10.242.238.88',undefined]}, {60,[],['ns_1@10.242.238.88',undefined]}, {61,[],['ns_1@10.242.238.88',undefined]}, {62,[],['ns_1@10.242.238.88',undefined]}, {63,[],['ns_1@10.242.238.88',undefined]}, {64,[],['ns_1@10.242.238.88',undefined]}, {65,[],['ns_1@10.242.238.88',undefined]}, {66,[],['ns_1@10.242.238.88',undefined]}, {67,[],['ns_1@10.242.238.88',undefined]}, {68,[],['ns_1@10.242.238.88',undefined]}, {69,[],['ns_1@10.242.238.88',undefined]}, {70,[],['ns_1@10.242.238.88',undefined]}, {71,[],['ns_1@10.242.238.88',undefined]}, {72,[],['ns_1@10.242.238.88',undefined]}, {73,[],['ns_1@10.242.238.88',undefined]}, {74,[],['ns_1@10.242.238.88',undefined]}, {75,[],['ns_1@10.242.238.88',undefined]}, {76,[],['ns_1@10.242.238.88',undefined]}, {77,[],['ns_1@10.242.238.88',undefined]}, {78,[],['ns_1@10.242.238.88',undefined]}, 
{79,[],['ns_1@10.242.238.88',undefined]}, {80,[],['ns_1@10.242.238.88',undefined]}, {81,[],['ns_1@10.242.238.88',undefined]}, {82,[],['ns_1@10.242.238.88',undefined]}, {83,[],['ns_1@10.242.238.88',undefined]}, {84,[],['ns_1@10.242.238.88',undefined]}, {85,[],['ns_1@10.242.238.88',undefined]}, {86,[],['ns_1@10.242.238.88',undefined]}, {87,[],['ns_1@10.242.238.88'|...]}, {88,[],[...]}, {89,[],...}, {90,...}, {...}|...]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88']}, {map_opts_hash,133465355}]]}] [ns_server:debug,2014-08-19T16:49:03.328,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: cert_and_pkey -> {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQOGMt4U8wCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5WgTuSJMU8qPdc8uDdst\nav13oFxDpbqz8mIk7TVReVHwO9MvKgi8cqlGev50BaQNfzFW41E/baDmpa8sAlSe\nzPoGcRD5wDJdHRH87FdW8eeE4rA8N9TcsSyJDo0gmWO+Vj+ow5dzF87001UstU6A\n5UQ5anT0dGnKLChpmk0KiKx28+XSnycDQ8osiLR"...>>, <<"*****">>} [ns_server:debug,2014-08-19T16:49:03.328,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> [2,5] [ns_server:debug,2014-08-19T16:49:03.328,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [ns_server:debug,2014-08-19T16:49:03.328,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [ns_server:debug,2014-08-19T16:49:03.328,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [ns_server:debug,2014-08-19T16:49:03.328,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:debug,2014-08-19T16:49:03.328,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T16:49:03.329,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [ns_server:debug,2014-08-19T16:49:03.329,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: memory_quota -> 90112 [ns_server:debug,2014-08-19T16:49:03.329,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@10.242.238.88','ns_1@10.242.238.89','ns_1@10.242.238.90'] [ns_server:debug,2014-08-19T16:49:03.329,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: read_only_user_creds -> null [ns_server:debug,2014-08-19T16:49:03.329,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] 
[ns_server:debug,2014-08-19T16:49:03.329,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [ns_server:debug,2014-08-19T16:49:03.329,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: replication_topology -> star [ns_server:debug,2014-08-19T16:49:03.329,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [ns_server:info,2014-08-19T16:49:03.329,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [ns_server:debug,2014-08-19T16:49:03.329,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>}, {name,<<"Group 1">>}, {nodes,['ns_1@10.242.238.88','ns_1@10.242.238.89','ns_1@10.242.238.90']}]] [ns_server:debug,2014-08-19T16:49:03.330,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:init:72]init reannouncing [ns_server:debug,2014-08-19T16:49:03.330,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [ns_server:debug,2014-08-19T16:49:03.331,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: settings -> [{stats,[{send_stats,false}]}] [ns_server:debug,2014-08-19T16:49:03.331,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: uuid -> <<"9032e293d656a8b04683554c561fe06f">> [ns_server:debug,2014-08-19T16:49:03.331,ns_1@10.242.238.90:ns_config_events<0.17153.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [error_logger:info,2014-08-19T16:49:03.331,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17414.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.331,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17406.0>}, {name,ns_node_disco_sup}, {mfa,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:49:03.331,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([alert_limits,auto_failover_cfg,autocompaction, buckets,cert_and_pkey,cluster_compat_version, drop_request_memory_threshold_mib, dynamic_config_version,email_alerts, fast_warmup,index_aware_rebalance_disabled, max_bucket_count,memory_quota,nodes_wanted,otp, read_only_user_creds,remote_clusters, replication,replication_topology,rest, rest_creds,server_groups, set_view_update_daemon,settings,uuid, vbucket_map_history, {couchdb,max_parallel_indexers}, {couchdb,max_parallel_replica_indexers}, {request_limit,capi}, {request_limit,rest}, {node,'ns_1@10.242.238.88',capi_port}, {node,'ns_1@10.242.238.88',compaction_daemon}, {node,'ns_1@10.242.238.88',config_version}, {node,'ns_1@10.242.238.88',isasl}, {node,'ns_1@10.242.238.88',membership}, {node,'ns_1@10.242.238.88',memcached}, {node,'ns_1@10.242.238.88',moxi}, {node,'ns_1@10.242.238.88',ns_log}, 
{node,'ns_1@10.242.238.88',port_servers}, {node,'ns_1@10.242.238.88',rest}, {node,'ns_1@10.242.238.88',ssl_capi_port}, {node,'ns_1@10.242.238.88', ssl_proxy_downstream_port}, {node,'ns_1@10.242.238.88', ssl_proxy_upstream_port}, {node,'ns_1@10.242.238.88',ssl_rest_port}, {node,'ns_1@10.242.238.89',capi_port}, {node,'ns_1@10.242.238.89',compaction_daemon}, {node,'ns_1@10.242.238.89',config_version}, {node,'ns_1@10.242.238.89',isasl}, {node,'ns_1@10.242.238.89',membership}, {node,'ns_1@10.242.238.89',memcached}, {node,'ns_1@10.242.238.89',moxi}, {node,'ns_1@10.242.238.89',ns_log}, {node,'ns_1@10.242.238.89',port_servers}, {node,'ns_1@10.242.238.89',rest}, {node,'ns_1@10.242.238.89',ssl_capi_port}, {node,'ns_1@10.242.238.89', ssl_proxy_downstream_port}, {node,'ns_1@10.242.238.89', ssl_proxy_upstream_port}, {node,'ns_1@10.242.238.89',ssl_rest_port}, {node,'ns_1@10.242.238.90',capi_port}, {node,'ns_1@10.242.238.90',compaction_daemon}, {node,'ns_1@10.242.238.90',config_version}, {node,'ns_1@10.242.238.90',isasl}, {node,'ns_1@10.242.238.90',membership}, {node,'ns_1@10.242.238.90',memcached}]..) [ns_server:debug,2014-08-19T16:49:03.333,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [{[['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88'|...], [...]|...], [{replication_topology,star},{tags,undefined},{max_slaves,10}]}] [ns_server:debug,2014-08-19T16:49:03.333,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T16:49:03.333,ns_1@10.242.238.90:ns_config_events<0.17153.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [ns_server:debug,2014-08-19T16:49:03.333,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2014-08-19T16:49:03.333,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T16:49:03.334,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined [ns_server:debug,2014-08-19T16:49:03.334,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:49:03.334,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [error_logger:info,2014-08-19T16:49:03.334,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17423.0>}, {name,vbucket_map_mirror}, {mfa,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.334,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T16:49:03.334,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:49:03.334,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',membership} -> active [ns_server:debug,2014-08-19T16:49:03.335,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, 
"vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.335,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.335,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T16:49:03.335,ns_1@10.242.238.90:ns_log_events<0.17405.0>:ns_mail_log:init:44]ns_mail_log started up [error_logger:info,2014-08-19T16:49:03.335,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17427.0>}, {name,bucket_info_cache}, {mfa,{bucket_info_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.335,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17430.0>}, {name,ns_tick_event}, {mfa,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.336,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17431.0>}, {name,buckets_events}, {mfa,{gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.336,ns_1@10.242.238.90:ns_heart_slow_status_updater<0.17440.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@10.242.238.90'}, {latest,"minute"}]}}} [ns_server:debug,2014-08-19T16:49:03.336,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", 
{"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [error_logger:info,2014-08-19T16:49:03.336,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.17433.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.336,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',rest} -> [{port,8091},{port_meta,global}] [error_logger:info,2014-08-19T16:49:03.336,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17432.0>}, {name,ns_mail_sup}, {mfa,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:49:03.336,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:49:03.336,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ssl_proxy_downstream_port} -> 11214 [error_logger:info,2014-08-19T16:49:03.336,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17434.0>}, {name,ns_stats_event}, {mfa,{gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.336,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:49:03.336,ns_1@10.242.238.90:ns_heart_slow_status_updater<0.17440.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}, {proc_lib,init_p_do_apply,3}]} [error_logger:info,2014-08-19T16:49:03.336,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17437.0>}, {name,samples_loader_tasks}, {mfa,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.336,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ssl_rest_port} -> 18091 
[ns_server:debug,2014-08-19T16:49:03.336,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',capi_port} -> 8092 [error_logger:info,2014-08-19T16:49:03.336,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17438.0>}, {name,ns_heart}, {mfa,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.337,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:49:03.337,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',config_version} -> {2,3,0} [error_logger:info,2014-08-19T16:49:03.337,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17441.0>}, {name,ns_doctor}, {mfa,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.337,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:49:03.337,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',membership} -> inactiveAdded [ns_server:debug,2014-08-19T16:49:03.337,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.337,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.338,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T16:49:03.338,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, 
"-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:49:03.338,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:49:03.338,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:49:03.338,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:49:03.339,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:49:03.339,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T16:49:03.339,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',membership} -> inactiveAdded [ns_server:debug,2014-08-19T16:49:03.339,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [ns_server:debug,2014-08-19T16:49:03.339,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T16:49:03.339,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:49:03.339,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:03.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: cert_and_pkey -> {<<"-----BEGIN 
CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQOGMt4U8wCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5WgTuSJMU8qPdc8uDdst\nav13oFxDpbqz8mIk7TVReVHwO9MvKgi8cqlGev50BaQNfzFW41E/baDmpa8sAlSe\nzPoGcRD5wDJdHRH87FdW8eeE4rA8N9TcsSyJDo0gmWO+Vj+ow5dzF87001UstU6A\n5UQ5anT0dGnKLChpmk0KiKx28+XSnycDQ8osiLR"...>>, <<"*****">>} [ns_server:debug,2014-08-19T16:49:03.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> [2,5] [ns_server:debug,2014-08-19T16:49:03.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [ns_server:debug,2014-08-19T16:49:03.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [ns_server:debug,2014-08-19T16:49:03.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [ns_server:debug,2014-08-19T16:49:03.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:debug,2014-08-19T16:49:03.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T16:49:03.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [ns_server:debug,2014-08-19T16:49:03.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: memory_quota -> 90112 [ns_server:debug,2014-08-19T16:49:03.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@10.242.238.88','ns_1@10.242.238.89','ns_1@10.242.238.90'] [ns_server:debug,2014-08-19T16:49:03.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,xyzevwdfypcplvpp}] [ns_server:debug,2014-08-19T16:49:03.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: read_only_user_creds -> null [ns_server:debug,2014-08-19T16:49:03.341,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] [ns_server:debug,2014-08-19T16:49:03.342,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [ns_server:debug,2014-08-19T16:49:03.342,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: replication_topology -> star [ns_server:debug,2014-08-19T16:49:03.342,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [ns_server:info,2014-08-19T16:49:03.342,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [ns_server:debug,2014-08-19T16:49:03.342,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: server_groups -> 
[[{uuid,<<"0">>}, {name,<<"Group 1">>}, {nodes,['ns_1@10.242.238.88','ns_1@10.242.238.89','ns_1@10.242.238.90']}]] [ns_server:debug,2014-08-19T16:49:03.342,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [ns_server:debug,2014-08-19T16:49:03.343,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: settings -> [{stats,[{send_stats,false}]}] [ns_server:debug,2014-08-19T16:49:03.343,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: uuid -> <<"9032e293d656a8b04683554c561fe06f">> [ns_server:debug,2014-08-19T16:49:03.344,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [{[['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88'|...], [...]|...], [{replication_topology,star},{tags,undefined},{max_slaves,10}]}] [ns_server:debug,2014-08-19T16:49:03.344,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T16:49:03.344,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2014-08-19T16:49:03.344,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T16:49:03.344,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined [ns_server:debug,2014-08-19T16:49:03.344,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:49:03.344,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:49:03.344,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T16:49:03.344,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:49:03.345,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',membership} -> active [ns_server:debug,2014-08-19T16:49:03.345,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.345,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.345,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] 
[ns_server:debug,2014-08-19T16:49:03.345,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:49:03.346,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:49:03.346,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:49:03.346,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:49:03.346,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:49:03.346,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T16:49:03.346,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:49:03.346,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:49:03.346,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T16:49:03.346,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:49:03.346,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',membership} -> inactiveAdded 
[ns_server:debug,2014-08-19T16:49:03.347,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.347,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.347,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T16:49:03.347,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:49:03.347,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:49:03.348,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:49:03.348,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:49:03.348,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ssl_proxy_upstream_port} 
-> 11215 [ns_server:debug,2014-08-19T16:49:03.348,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T16:49:03.348,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:49:03.348,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:49:03.348,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T16:49:03.348,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:49:03.348,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',membership} -> inactiveAdded [ns_server:debug,2014-08-19T16:49:03.348,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.348,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.349,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T16:49:03.349,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", 
{"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:49:03.349,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:49:03.349,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:49:03.349,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:49:03.349,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:49:03.349,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_rest_port} -> 18091 [ns_server:info,2014-08-19T16:49:03.365,ns_1@10.242.238.90:remote_clusters_info<0.17445.0>:remote_clusters_info:read_or_create_table:540]Reading remote_clusters_info content from /opt/couchbase/var/lib/couchbase/remote_clusters_cache_v3 [ns_server:debug,2014-08-19T16:49:03.365,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:03.365,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:49:03.365,ns_1@10.242.238.90:<0.17420.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.88', 'ns_1@10.242.238.89', 'ns_1@10.242.238.90'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:03.366,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:03.366,ns_1@10.242.238.90:ns_server_sup<0.17392.0>:mb_master:check_master_takeover_needed:141]Sending master node question to the following nodes: ['ns_1@10.242.238.88'] [error_logger:info,2014-08-19T16:49:03.366,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17445.0>}, {name,remote_clusters_info}, {mfa,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.366,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17449.0>}, {name,master_activity_events}, {mfa, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
[ns_server:debug,2014-08-19T16:49:03.366,ns_1@10.242.238.90:ns_server_sup<0.17392.0>:mb_master:check_master_takeover_needed:143]Got replies: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:03.367,ns_1@10.242.238.90:ns_server_sup<0.17392.0>:mb_master:check_master_takeover_needed:156]Checking version of current master: 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:03.367,ns_1@10.242.238.90:ns_server_sup<0.17392.0>:mb_master:check_master_takeover_needed:174]Current master's supported compat version: [2,5,1] [ns_server:debug,2014-08-19T16:49:03.367,ns_1@10.242.238.90:ns_server_sup<0.17392.0>:mb_master:check_master_takeover_needed:181]Current master is not older [ns_server:debug,2014-08-19T16:49:03.367,ns_1@10.242.238.90:mb_master<0.17457.0>:mb_master:init:96]Starting as candidate. Peers: ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90'] [ns_server:debug,2014-08-19T16:49:03.367,ns_1@10.242.238.90:ns_heart_slow_status_updater<0.17440.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@10.242.238.90'}, {latest,"minute"}]}}} [error_logger:info,2014-08-19T16:49:03.368,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17457.0>}, {name,mb_master}, {mfa,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:49:03.368,ns_1@10.242.238.90:ns_heart_slow_status_updater<0.17440.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}]} [error_logger:info,2014-08-19T16:49:03.368,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17460.0>}, {name,master_activity_events_ingress}, {mfa, {gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [user:info,2014-08-19T16:49:03.368,ns_1@10.242.238.90:ns_node_disco<0.17408.0>:ns_node_disco:handle_info:159]Node 'ns_1@10.242.238.90' saw that node 'ns_1@10.242.238.89' came up. Tags: [] [error_logger:info,2014-08-19T16:49:03.368,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17461.0>}, {name,master_activity_events_timestamper}, {mfa, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.368,ns_1@10.242.238.90:ns_node_disco_events<0.17407.0>:ns_node_disco_rep_events:handle_event:42]Detected a new nodes (['ns_1@10.242.238.89']). Moving config around. 
[error_logger:info,2014-08-19T16:49:03.368,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17462.0>}, {name,master_activity_events_pids_watcher}, {mfa, {master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:03.369,ns_1@10.242.238.90:ns_node_disco_events<0.17407.0>:ns_node_disco_log:handle_event:46]ns_node_disco_log: nodes changed: ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90'] [ns_server:info,2014-08-19T16:49:03.369,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_info:220]Replicating config to/from: ['ns_1@10.242.238.89'] [ns_server:debug,2014-08-19T16:49:03.369,ns_1@10.242.238.90:<0.17420.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90'], with cookie: xyzevwdfypcplvpp [ns_server:info,2014-08-19T16:49:03.369,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.89' [ns_server:info,2014-08-19T16:49:03.374,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.88': ["default"] [error_logger:info,2014-08-19T16:49:03.384,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17463.0>}, {name,master_activity_events_keeper}, {mfa,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.385,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_info:225]config pull_and_push done. 
[ns_server:debug,2014-08-19T16:49:03.392,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:03.392,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:49:03.392,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:03.392,ns_1@10.242.238.90:<0.17425.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.88', 'ns_1@10.242.238.89', 'ns_1@10.242.238.90'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:03.393,ns_1@10.242.238.90:<0.17425.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90'], with cookie: xyzevwdfypcplvpp [error_logger:info,2014-08-19T16:49:03.424,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.17471.0>}, {name,ns_ssl_services_setup}, {mfargs,{ns_ssl_services_setup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.426,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.17475.0>}, {name,ns_rest_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_rest_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.428,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.17492.0>}, {name,ns_capi_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_capi_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.428,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17470.0>}, {name,ns_ssl_services_sup}, {mfargs,{ns_ssl_services_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:03.428,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17509.0>}, {name,menelaus_ui_auth}, {mfargs,{menelaus_ui_auth,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.428,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17510.0>}, {name,menelaus_web_cache}, {mfargs,{menelaus_web_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] 
[error_logger:info,2014-08-19T16:49:03.428,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17511.0>}, {name,menelaus_stats_gatherer}, {mfargs,{menelaus_stats_gatherer,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.429,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17512.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.429,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17529.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.429,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17530.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.429,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17531.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [user:info,2014-08-19T16:49:03.429,ns_1@10.242.238.90:ns_server_sup<0.17392.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@10.242.238.90'. 
[error_logger:info,2014-08-19T16:49:03.429,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17469.0>}, {name,menelaus}, {mfa,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:03.430,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.17533.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:03.430,ns_1@10.242.238.90:<0.17535.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213 [error_logger:info,2014-08-19T16:49:03.430,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.17534.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:03.430,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.17535.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.430,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17532.0>}, {name,mc_sup}, {mfa,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:03.431,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17536.0>}, {name,ns_ports_setup}, {mfa,{ns_ports_setup,start,[]}}, {restart_type,{permanent,4}}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.431,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17537.0>}, {name,ns_port_memcached_killer}, {mfa,{ns_ports_setup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:03.432,ns_1@10.242.238.90:<0.17539.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/opt/couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms [error_logger:info,2014-08-19T16:49:03.432,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17539.0>}, {name,ns_memcached_log_rotator}, {mfa,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[error_logger:info,2014-08-19T16:49:03.433,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17541.0>}, {name,memcached_clients_pool}, {mfa,{memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.434,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17542.0>}, {name,proxied_memcached_clients_pool}, {mfa,{proxied_memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.435,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17543.0>}, {name,xdc_lhttpc_pool}, {mfa, {lhttpc_manager,start_link, [[{name,xdc_lhttpc_pool}, {connection_timeout,120000}, {pool_size,200}]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.435,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17544.0>}, {name,ns_null_connection_pool}, {mfa, {ns_null_connection_pool,start_link, [ns_null_connection_pool]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.435,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17545.0>}, {name,xdc_replication_sup}, {mfa,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [user:info,2014-08-19T16:49:03.440,ns_1@10.242.238.90:<0.17397.0>:ns_log:crash_consumption_loop:64]Port server moxi on node 'babysitter_of_ns_1@127.0.0.1' exited with status 0. Restarting. Messages: WARNING: curl error: transfer closed with outstanding read data remaining from: http://127.0.0.1:8091/pools/default/saslBucketsStreaming EOL on stdin. 
Exiting [ns_server:debug,2014-08-19T16:49:03.441,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:03.441,ns_1@10.242.238.90:<0.17426.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.88', 'ns_1@10.242.238.89', 'ns_1@10.242.238.90'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:03.442,ns_1@10.242.238.90:<0.17426.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90'], with cookie: xyzevwdfypcplvpp [error_logger:info,2014-08-19T16:49:03.471,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17546.0>}, {name,xdc_rep_manager}, {mfa,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.471,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17555.0>}, {name,ns_memcached_sockets_pool}, {mfa,{ns_memcached_sockets_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.471,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.17558.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.471,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.17560.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.472,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.17559.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:03.472,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17557.0>}, {name,ns_bucket_worker_sup}, {mfa,{ns_bucket_worker_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:03.472,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17561.0>}, {name,system_stats_collector}, {mfa,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
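The nodes_wanted "updated"/"pong" pair above is worth reading as a pair: the first list is every node the cluster wants, the second is the subset that answered using the shared cookie. The implied check is a simple set difference; a sketch with the values copied from those entries (the function name is illustrative, not an ns_server call):

    def missing_nodes(wanted, pong):
        """Wanted nodes that did not answer the discovery ping."""
        return sorted(set(wanted) - set(pong))

    # Node lists copied from the two entries above.
    wanted = ["ns_1@10.242.238.88", "ns_1@10.242.238.89", "ns_1@10.242.238.90"]
    pong = ["ns_1@10.242.238.88", "ns_1@10.242.238.89", "ns_1@10.242.238.90"]

    print(missing_nodes(wanted, pong))   # [] -- every wanted node responded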
[error_logger:info,2014-08-19T16:49:03.472,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17564.0>}, {name,{stats_archiver,"@system"}}, {mfa,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.472,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17566.0>}, {name,{stats_reader,"@system"}}, {mfa,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:03.472,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17567.0>}, {name,compaction_daemon}, {mfa,{compaction_daemon,start_link,[]}}, {restart_type,{permanent,4}}, {shutdown,86400000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.473,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:49:03.473,ns_1@10.242.238.90:xdc_rdoc_replication_srv<0.17569.0>:xdc_rdoc_replication_srv:init:76]Loaded the following docs: [] [ns_server:debug,2014-08-19T16:49:03.473,ns_1@10.242.238.90:xdc_rdoc_replication_srv<0.17569.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [error_logger:info,2014-08-19T16:49:03.473,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17569.0>}, {name,xdc_rdoc_replication_srv}, {mfa,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:03.474,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:info,2014-08-19T16:49:03.474,ns_1@10.242.238.90:set_view_update_daemon<0.17571.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000 [error_logger:info,2014-08-19T16:49:03.474,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17571.0>}, {name,set_view_update_daemon}, {mfa,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [user:info,2014-08-19T16:49:03.474,ns_1@10.242.238.90:ns_cluster<0.17151.0>:ns_cluster:perform_actual_join:954]Node ns_1@10.242.238.90 joined cluster [error_logger:info,2014-08-19T16:49:03.474,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.17392.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [cluster:debug,2014-08-19T16:49:03.474,ns_1@10.242.238.90:ns_cluster<0.17151.0>:ns_cluster:handle_call:167]complete_join([{<<"targetNode">>,<<"ns_1@10.242.238.90">>}, {<<"availableStorage">>, {struct,[{<<"hdd">>, [{struct,[{<<"path">>,<<"/">>}, {<<"sizeKBytes">>,103212320}, {<<"usagePercent">>,3}]}, {struct,[{<<"path">>,<<"/dev/shm">>}, {<<"sizeKBytes">>,49515824}, {<<"usagePercent">>,0}]}, {struct,[{<<"path">>,<<"/boot">>}, {<<"sizeKBytes">>,198337}, {<<"usagePercent">>,17}]}, {struct,[{<<"path">>,<<"/data">>}, {<<"sizeKBytes">>,329573012}, {<<"usagePercent">>,1}]}, {struct,[{<<"path">>,<<"/test">>}, {<<"sizeKBytes">>,528447160}, {<<"usagePercent">>,1}]}, {struct,[{<<"path">>,<<"/var/lib/pgsql">>}, {<<"sizeKBytes">>,1922866992}, {<<"usagePercent">>,1}]}]}]}}, {<<"memoryQuota">>,90112}, {<<"storageTotals">>, {struct,[{<<"ram">>, {struct,[{<<"total">>,101408407552}, {<<"quotaTotal">>,94489280512}, {<<"quotaUsed">>,13369344000}, {<<"used">>,13174808576}, {<<"usedByData">>,31847576}]}}, {<<"hdd">>, {struct,[{<<"total">>,1969015799808}, {<<"quotaTotal">>,1969015799808}, {<<"used">>,19690157998}, {<<"usedByData">>,2736915}, {<<"free">>,1949325641810}]}}]}}, {<<"storage">>, {struct,[{<<"ssd">>,[]}, {<<"hdd">>, [{struct,[{<<"path">>,<<"/var/lib/pgsql">>}, {<<"index_path">>,<<"/var/lib/pgsql">>}, {<<"quotaMb">>,<<"none">>}, {<<"state">>,<<"ok">>}]}]}]}}, {<<"systemStats">>, {struct,[{<<"cpu_utilization_rate">>,0.6265664160401002}, {<<"swap_total">>,0}, {<<"swap_used">>,0}, {<<"mem_total">>,101408407552}, {<<"mem_free">>,89866596352}]}}, {<<"interestingStats">>, {struct,[{<<"cmd_get">>,0.0}, {<<"couch_docs_actual_disk_size">>,2736915}, {<<"couch_docs_data_size">>,2729956}, {<<"couch_views_actual_disk_size">>,0}, {<<"couch_views_data_size">>,0}, {<<"curr_items">>,0}, {<<"curr_items_tot">>,0}, {<<"ep_bg_fetched">>,0.0}, {<<"get_hits">>,0.0}, {<<"mem_used">>,31847576}, {<<"ops">>,0.0}, {<<"vb_replica_curr_items">>,0}]}}, {<<"uptime">>,<<"4088">>}, {<<"memoryTotal">>,101408407552}, {<<"memoryFree">>,89866596352}, {<<"mcdMemoryReserved">>,77368}, {<<"mcdMemoryAllocated">>,77368}, {<<"couchApiBase">>,<<"http://10.242.238.88:8092/">>}, {<<"otpCookie">>,<<"xyzevwdfypcplvpp">>}, {<<"clusterMembership">>,<<"active">>}, {<<"status">>,<<"healthy">>}, {<<"otpNode">>,<<"ns_1@10.242.238.88">>}, {<<"thisNode">>,true}, 
{<<"hostname">>,<<"10.242.238.88:8091">>}, {<<"clusterCompatibility">>,131077}, {<<"version">>,<<"2.5.1-1083-rel-enterprise">>}, {<<"os">>,<<"x86_64-unknown-linux-gnu">>}, {<<"ports">>, {struct,[{<<"httpsMgmt">>,18091}, {<<"httpsCAPI">>,18092}, {<<"sslProxy">>,11214}, {<<"proxy">>,11211}, {<<"direct">>,11210}]}}]) -> {ok,ok} [ns_server:info,2014-08-19T16:49:05.076,ns_1@10.242.238.90:mb_master<0.17457.0>:mb_master:candidate:362]Changing master from undefined to 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:25.982,ns_1@10.242.238.90:ns_config_events<0.17153.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [ns_server:debug,2014-08-19T16:49:25.982,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:49:25.982,ns_1@10.242.238.90:mb_master<0.17457.0>:mb_master:update_peers:506]List of peers has changed from ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90'] to ['ns_1@10.242.238.88', 'ns_1@10.242.238.89', 'ns_1@10.242.238.90', 'ns_1@10.242.238.91'] [ns_server:debug,2014-08-19T16:49:25.982,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@10.242.238.88','ns_1@10.242.238.89','ns_1@10.242.238.90', 'ns_1@10.242.238.91'] [ns_server:debug,2014-08-19T16:49:25.983,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:25.983,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>}, {name,<<"Group 1">>}, {nodes,['ns_1@10.242.238.88','ns_1@10.242.238.89','ns_1@10.242.238.90', 'ns_1@10.242.238.91']}]] [ns_server:debug,2014-08-19T16:49:25.983,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',membership} -> inactiveAdded [ns_server:debug,2014-08-19T16:49:26.026,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:26.027,ns_1@10.242.238.90:<0.17682.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.88', 'ns_1@10.242.238.89', 'ns_1@10.242.238.90', 'ns_1@10.242.238.91'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:26.030,ns_1@10.242.238.90:<0.17682.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:26.491,ns_1@10.242.238.90:<0.17570.0>:xdc_rdoc_replication_srv:nodeup_monitoring_loop:46]got nodeup event. Considering rdocs replication [ns_server:debug,2014-08-19T16:49:26.491,ns_1@10.242.238.90:xdc_rdoc_replication_srv<0.17569.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [user:info,2014-08-19T16:49:26.491,ns_1@10.242.238.90:ns_node_disco<0.17408.0>:ns_node_disco:handle_info:159]Node 'ns_1@10.242.238.90' saw that node 'ns_1@10.242.238.91' came up. Tags: [] [ns_server:debug,2014-08-19T16:49:26.491,ns_1@10.242.238.90:ns_node_disco_events<0.17407.0>:ns_node_disco_rep_events:handle_event:42]Detected a new nodes (['ns_1@10.242.238.91']). Moving config around. 
[ns_server:info,2014-08-19T16:49:26.491,ns_1@10.242.238.90:ns_node_disco_events<0.17407.0>:ns_node_disco_log:handle_event:46]ns_node_disco_log: nodes changed: ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91'] [ns_server:warn,2014-08-19T16:49:26.492,ns_1@10.242.238.90:xdc_rdoc_replication_srv<0.17569.0>:xdc_rdoc_replication_srv:handle_info:150]Remote server node {xdc_rdoc_replication_srv,'ns_1@10.242.238.91'} process down: noproc [ns_server:debug,2014-08-19T16:49:26.517,ns_1@10.242.238.90:ns_config_events<0.17153.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [ns_server:debug,2014-08-19T16:49:26.517,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,xyzevwdfypcplvpp}] [ns_server:debug,2014-08-19T16:49:26.517,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:49:26.517,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:49:26.517,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:49:26.518,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:26.518,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T16:49:26.518,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:49:26.518,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:49:26.518,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:49:26.518,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T16:49:26.519,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", 
{"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:49:26.519,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:49:26.519,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:49:26.519,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:49:26.519,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:49:26.519,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T16:49:26.555,ns_1@10.242.238.90:ns_cookie_manager<0.17150.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:26.555,ns_1@10.242.238.90:<0.17689.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.88', 'ns_1@10.242.238.89', 'ns_1@10.242.238.90', 'ns_1@10.242.238.91'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:26.556,ns_1@10.242.238.90:<0.17689.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:26.557,ns_1@10.242.238.90:xdc_rdoc_replication_srv<0.17569.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:30.839,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:30.840,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: 
{node,'ns_1@10.242.238.89',membership} -> active [ns_server:debug,2014-08-19T16:49:30.840,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',membership} -> active [ns_server:debug,2014-08-19T16:49:30.841,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',membership} -> active [ns_server:debug,2014-08-19T16:49:30.841,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2308 us [ns_server:debug,2014-08-19T16:49:30.842,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: counters -> [{rebalance_start,1}] [ns_server:debug,2014-08-19T16:49:30.842,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: rebalance_status -> running [ns_server:debug,2014-08-19T16:49:30.842,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: rebalancer_pid -> <16550.25442.0> [user:info,2014-08-19T16:49:30.863,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_unused_buckets_db_files:492]Deleting old data files of bucket "tiles" [user:info,2014-08-19T16:49:30.863,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_unused_buckets_db_files:492]Deleting old data files of bucket "default" [ns_server:info,2014-08-19T16:49:30.867,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/master">>: ok [ns_server:info,2014-08-19T16:49:30.870,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/999">>: ok [ns_server:info,2014-08-19T16:49:30.873,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/998">>: ok [ns_server:info,2014-08-19T16:49:30.876,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/997">>: ok [ns_server:info,2014-08-19T16:49:30.881,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/996">>: ok [ns_server:info,2014-08-19T16:49:30.884,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/995">>: ok [ns_server:info,2014-08-19T16:49:30.888,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/994">>: ok [ns_server:info,2014-08-19T16:49:30.891,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/993">>: ok [ns_server:info,2014-08-19T16:49:30.893,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/992">>: ok [ns_server:info,2014-08-19T16:49:30.896,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/991">>: ok [ns_server:info,2014-08-19T16:49:30.899,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/990">>: ok [ns_server:info,2014-08-19T16:49:30.901,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/99">>: ok [ns_server:info,2014-08-19T16:49:30.904,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/989">>: ok [ns_server:info,2014-08-19T16:49:30.907,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/988">>: ok 
[ns_server:info,2014-08-19T16:49:30.911,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/987">>: ok [ns_server:info,2014-08-19T16:49:30.914,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/986">>: ok [ns_server:info,2014-08-19T16:49:30.917,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/985">>: ok [ns_server:info,2014-08-19T16:49:30.920,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/984">>: ok [ns_server:info,2014-08-19T16:49:30.924,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/983">>: ok [ns_server:info,2014-08-19T16:49:30.926,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/982">>: ok [ns_server:info,2014-08-19T16:49:30.928,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/981">>: ok [ns_server:info,2014-08-19T16:49:30.931,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/980">>: ok [ns_server:info,2014-08-19T16:49:30.934,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/98">>: ok [ns_server:info,2014-08-19T16:49:30.936,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/979">>: ok [ns_server:info,2014-08-19T16:49:30.938,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/978">>: ok [ns_server:info,2014-08-19T16:49:30.941,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/977">>: ok [ns_server:info,2014-08-19T16:49:30.943,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/976">>: ok [ns_server:info,2014-08-19T16:49:30.945,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/975">>: ok [ns_server:info,2014-08-19T16:49:30.947,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/974">>: ok [ns_server:info,2014-08-19T16:49:30.949,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/973">>: ok [ns_server:info,2014-08-19T16:49:30.951,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/972">>: ok [ns_server:info,2014-08-19T16:49:30.953,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/971">>: ok [ns_server:info,2014-08-19T16:49:30.955,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/970">>: ok [ns_server:info,2014-08-19T16:49:30.957,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/97">>: ok [ns_server:info,2014-08-19T16:49:30.959,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/969">>: ok [ns_server:info,2014-08-19T16:49:30.960,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/968">>: ok [ns_server:info,2014-08-19T16:49:30.963,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/967">>: ok 
[ns_server:info,2014-08-19T16:49:30.965,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/966">>: ok [ns_server:info,2014-08-19T16:49:30.967,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/965">>: ok [ns_server:info,2014-08-19T16:49:30.969,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/964">>: ok [ns_server:info,2014-08-19T16:49:30.971,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/963">>: ok [ns_server:info,2014-08-19T16:49:30.973,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/962">>: ok [ns_server:info,2014-08-19T16:49:30.975,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/961">>: ok [ns_server:info,2014-08-19T16:49:30.976,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/960">>: ok [ns_server:info,2014-08-19T16:49:30.980,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/96">>: ok [ns_server:info,2014-08-19T16:49:30.983,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/959">>: ok [ns_server:info,2014-08-19T16:49:30.986,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/958">>: ok [ns_server:info,2014-08-19T16:49:30.989,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/957">>: ok [ns_server:info,2014-08-19T16:49:30.992,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/956">>: ok [ns_server:info,2014-08-19T16:49:30.995,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/955">>: ok [ns_server:info,2014-08-19T16:49:30.997,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/954">>: ok [ns_server:info,2014-08-19T16:49:30.999,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/953">>: ok [ns_server:info,2014-08-19T16:49:31.001,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/952">>: ok [ns_server:info,2014-08-19T16:49:31.004,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/951">>: ok [ns_server:info,2014-08-19T16:49:31.007,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/950">>: ok [ns_server:info,2014-08-19T16:49:31.009,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/95">>: ok [ns_server:info,2014-08-19T16:49:31.011,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/949">>: ok [ns_server:info,2014-08-19T16:49:31.014,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/948">>: ok [ns_server:info,2014-08-19T16:49:31.016,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/947">>: ok [ns_server:info,2014-08-19T16:49:31.018,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/946">>: ok 
[ns_server:info,2014-08-19T16:49:31.022,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/945">>: ok [ns_server:info,2014-08-19T16:49:31.024,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/944">>: ok [ns_server:info,2014-08-19T16:49:31.026,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/943">>: ok [ns_server:info,2014-08-19T16:49:31.029,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/942">>: ok [ns_server:info,2014-08-19T16:49:31.032,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/941">>: ok [ns_server:info,2014-08-19T16:49:31.034,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/940">>: ok [ns_server:info,2014-08-19T16:49:31.038,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/94">>: ok [ns_server:info,2014-08-19T16:49:31.040,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/939">>: ok [ns_server:info,2014-08-19T16:49:31.042,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/938">>: ok [ns_server:info,2014-08-19T16:49:31.044,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/93">>: ok [ns_server:info,2014-08-19T16:49:31.047,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/92">>: ok [ns_server:info,2014-08-19T16:49:31.049,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/91">>: ok [ns_server:info,2014-08-19T16:49:31.051,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/90">>: ok [ns_server:info,2014-08-19T16:49:31.053,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/89">>: ok [ns_server:info,2014-08-19T16:49:31.055,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/88">>: ok [ns_server:info,2014-08-19T16:49:31.058,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/87">>: ok [ns_server:info,2014-08-19T16:49:31.060,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/86">>: ok [ns_server:info,2014-08-19T16:49:31.063,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/767">>: ok [ns_server:info,2014-08-19T16:49:31.065,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/766">>: ok [ns_server:info,2014-08-19T16:49:31.068,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/765">>: ok [ns_server:info,2014-08-19T16:49:31.069,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/764">>: ok [ns_server:info,2014-08-19T16:49:31.072,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/763">>: ok [ns_server:info,2014-08-19T16:49:31.074,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/762">>: ok 
[ns_server:info,2014-08-19T16:49:31.077,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/761">>: ok [ns_server:info,2014-08-19T16:49:31.079,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/760">>: ok [ns_server:info,2014-08-19T16:49:31.081,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/759">>: ok [ns_server:info,2014-08-19T16:49:31.083,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/758">>: ok [ns_server:info,2014-08-19T16:49:31.085,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/757">>: ok [ns_server:info,2014-08-19T16:49:31.087,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/756">>: ok [ns_server:info,2014-08-19T16:49:31.089,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/755">>: ok [ns_server:info,2014-08-19T16:49:31.091,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/754">>: ok [ns_server:info,2014-08-19T16:49:31.093,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/753">>: ok [ns_server:info,2014-08-19T16:49:31.095,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/752">>: ok [ns_server:info,2014-08-19T16:49:31.098,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/751">>: ok [ns_server:info,2014-08-19T16:49:31.101,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/750">>: ok [ns_server:info,2014-08-19T16:49:31.104,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/749">>: ok [ns_server:info,2014-08-19T16:49:31.106,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/748">>: ok [ns_server:info,2014-08-19T16:49:31.107,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/747">>: ok [ns_server:info,2014-08-19T16:49:31.109,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/746">>: ok [ns_server:info,2014-08-19T16:49:31.112,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/745">>: ok [ns_server:info,2014-08-19T16:49:31.114,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/744">>: ok [ns_server:info,2014-08-19T16:49:31.116,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/743">>: ok [ns_server:info,2014-08-19T16:49:31.118,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/742">>: ok [ns_server:info,2014-08-19T16:49:31.120,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/741">>: ok [ns_server:info,2014-08-19T16:49:31.122,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/740">>: ok [ns_server:info,2014-08-19T16:49:31.124,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/739">>: ok 
[ns_server:info,2014-08-19T16:49:31.126,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/738">>: ok [ns_server:info,2014-08-19T16:49:31.128,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/737">>: ok [ns_server:info,2014-08-19T16:49:31.130,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/736">>: ok [ns_server:info,2014-08-19T16:49:31.132,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/735">>: ok [ns_server:info,2014-08-19T16:49:31.133,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/734">>: ok [ns_server:info,2014-08-19T16:49:31.135,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/733">>: ok [ns_server:info,2014-08-19T16:49:31.137,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/732">>: ok [ns_server:info,2014-08-19T16:49:31.139,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/731">>: ok [ns_server:info,2014-08-19T16:49:31.142,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/730">>: ok [ns_server:info,2014-08-19T16:49:31.146,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/729">>: ok [ns_server:info,2014-08-19T16:49:31.148,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/728">>: ok [ns_server:info,2014-08-19T16:49:31.150,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/727">>: ok [ns_server:info,2014-08-19T16:49:31.152,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/726">>: ok [ns_server:info,2014-08-19T16:49:31.155,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/725">>: ok [ns_server:info,2014-08-19T16:49:31.157,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/724">>: ok [ns_server:info,2014-08-19T16:49:31.159,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/723">>: ok [ns_server:info,2014-08-19T16:49:31.161,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/722">>: ok [ns_server:info,2014-08-19T16:49:31.164,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/721">>: ok [ns_server:info,2014-08-19T16:49:31.166,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/720">>: ok [ns_server:info,2014-08-19T16:49:31.168,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/719">>: ok [ns_server:info,2014-08-19T16:49:31.170,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/718">>: ok [ns_server:info,2014-08-19T16:49:31.172,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/717">>: ok [ns_server:info,2014-08-19T16:49:31.174,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/716">>: ok 
[ns_server:info,2014-08-19T16:49:31.177,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/715">>: ok [ns_server:info,2014-08-19T16:49:31.180,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/714">>: ok [ns_server:info,2014-08-19T16:49:31.182,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/713">>: ok [ns_server:info,2014-08-19T16:49:31.185,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/712">>: ok [ns_server:info,2014-08-19T16:49:31.187,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/711">>: ok [ns_server:info,2014-08-19T16:49:31.189,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/710">>: ok [ns_server:info,2014-08-19T16:49:31.192,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/709">>: ok [ns_server:info,2014-08-19T16:49:31.194,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/708">>: ok [ns_server:info,2014-08-19T16:49:31.196,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/707">>: ok [ns_server:info,2014-08-19T16:49:31.199,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/706">>: ok [ns_server:info,2014-08-19T16:49:31.201,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/705">>: ok [ns_server:info,2014-08-19T16:49:31.203,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/704">>: ok [ns_server:info,2014-08-19T16:49:31.206,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/703">>: ok [ns_server:info,2014-08-19T16:49:31.208,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/702">>: ok [ns_server:info,2014-08-19T16:49:31.210,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/701">>: ok [ns_server:info,2014-08-19T16:49:31.213,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/700">>: ok [ns_server:info,2014-08-19T16:49:31.215,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/699">>: ok [ns_server:info,2014-08-19T16:49:31.218,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/698">>: ok [ns_server:info,2014-08-19T16:49:31.220,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/697">>: ok [ns_server:info,2014-08-19T16:49:31.222,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/696">>: ok [ns_server:info,2014-08-19T16:49:31.224,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/695">>: ok [ns_server:info,2014-08-19T16:49:31.226,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/694">>: ok [ns_server:info,2014-08-19T16:49:31.229,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/693">>: ok 
[ns_server:info,2014-08-19T16:49:31.231,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/692">>: ok [ns_server:info,2014-08-19T16:49:31.233,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/691">>: ok [ns_server:info,2014-08-19T16:49:31.235,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/690">>: ok [ns_server:info,2014-08-19T16:49:31.238,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/689">>: ok [ns_server:info,2014-08-19T16:49:31.240,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/688">>: ok [ns_server:info,2014-08-19T16:49:31.242,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/687">>: ok [ns_server:info,2014-08-19T16:49:31.244,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/686">>: ok [ns_server:info,2014-08-19T16:49:31.246,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/685">>: ok [ns_server:info,2014-08-19T16:49:31.248,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/684">>: ok [ns_server:info,2014-08-19T16:49:31.250,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/683">>: ok [ns_server:info,2014-08-19T16:49:31.252,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/682">>: ok [ns_server:info,2014-08-19T16:49:31.254,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/681">>: ok [ns_server:info,2014-08-19T16:49:31.256,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/680">>: ok [ns_server:info,2014-08-19T16:49:31.258,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/679">>: ok [ns_server:info,2014-08-19T16:49:31.260,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/678">>: ok [ns_server:info,2014-08-19T16:49:31.262,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/677">>: ok [ns_server:info,2014-08-19T16:49:31.264,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/676">>: ok [ns_server:info,2014-08-19T16:49:31.266,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/675">>: ok [ns_server:info,2014-08-19T16:49:31.269,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/674">>: ok [ns_server:info,2014-08-19T16:49:31.270,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/673">>: ok [ns_server:info,2014-08-19T16:49:31.272,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/672">>: ok [ns_server:info,2014-08-19T16:49:31.274,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/671">>: ok [ns_server:info,2014-08-19T16:49:31.275,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/670">>: ok 
[ns_server:info,2014-08-19T16:49:31.277,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/669">>: ok [ns_server:info,2014-08-19T16:49:31.279,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/668">>: ok [ns_server:info,2014-08-19T16:49:31.281,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/667">>: ok [ns_server:info,2014-08-19T16:49:31.283,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/666">>: ok [ns_server:info,2014-08-19T16:49:31.285,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/665">>: ok [ns_server:info,2014-08-19T16:49:31.287,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/664">>: ok [ns_server:info,2014-08-19T16:49:31.289,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/663">>: ok [ns_server:info,2014-08-19T16:49:31.291,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/662">>: ok [ns_server:info,2014-08-19T16:49:31.293,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/661">>: ok [ns_server:info,2014-08-19T16:49:31.295,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/660">>: ok [ns_server:info,2014-08-19T16:49:31.297,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/659">>: ok [ns_server:info,2014-08-19T16:49:31.299,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/658">>: ok [ns_server:info,2014-08-19T16:49:31.302,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/657">>: ok [ns_server:info,2014-08-19T16:49:31.303,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/656">>: ok [ns_server:info,2014-08-19T16:49:31.305,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/655">>: ok [ns_server:info,2014-08-19T16:49:31.306,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/654">>: ok [ns_server:info,2014-08-19T16:49:31.308,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/653">>: ok [ns_server:info,2014-08-19T16:49:31.310,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/652">>: ok [ns_server:info,2014-08-19T16:49:31.312,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/651">>: ok [ns_server:info,2014-08-19T16:49:31.314,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/650">>: ok [ns_server:info,2014-08-19T16:49:31.316,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/649">>: ok [ns_server:info,2014-08-19T16:49:31.317,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/648">>: ok [ns_server:info,2014-08-19T16:49:31.319,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/647">>: ok 
[ns_server:info,2014-08-19T16:49:31.320,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/646">>: ok [ns_server:info,2014-08-19T16:49:31.323,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/645">>: ok [ns_server:info,2014-08-19T16:49:31.324,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/644">>: ok [ns_server:info,2014-08-19T16:49:31.326,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/643">>: ok [ns_server:info,2014-08-19T16:49:31.329,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/642">>: ok [ns_server:info,2014-08-19T16:49:31.331,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/641">>: ok [ns_server:info,2014-08-19T16:49:31.333,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/640">>: ok [ns_server:info,2014-08-19T16:49:31.335,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/639">>: ok [ns_server:info,2014-08-19T16:49:31.337,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/638">>: ok [ns_server:info,2014-08-19T16:49:31.339,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/637">>: ok [ns_server:info,2014-08-19T16:49:31.341,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/636">>: ok [ns_server:info,2014-08-19T16:49:31.343,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/635">>: ok [ns_server:info,2014-08-19T16:49:31.345,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/634">>: ok [ns_server:info,2014-08-19T16:49:31.347,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/633">>: ok [ns_server:info,2014-08-19T16:49:31.348,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/632">>: ok [ns_server:info,2014-08-19T16:49:31.350,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/631">>: ok [ns_server:info,2014-08-19T16:49:31.352,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/630">>: ok [ns_server:info,2014-08-19T16:49:31.353,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/629">>: ok [ns_server:info,2014-08-19T16:49:31.355,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/628">>: ok [ns_server:info,2014-08-19T16:49:31.357,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/627">>: ok [ns_server:info,2014-08-19T16:49:31.358,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/626">>: ok [ns_server:info,2014-08-19T16:49:31.360,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/625">>: ok [ns_server:info,2014-08-19T16:49:31.361,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/624">>: ok 
[ns_server:info,2014-08-19T16:49:31.363,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/623">>: ok [ns_server:info,2014-08-19T16:49:31.364,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/622">>: ok [ns_server:info,2014-08-19T16:49:31.366,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/621">>: ok [ns_server:info,2014-08-19T16:49:31.368,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/620">>: ok [ns_server:info,2014-08-19T16:49:31.369,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/619">>: ok [ns_server:info,2014-08-19T16:49:31.371,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/618">>: ok [ns_server:info,2014-08-19T16:49:31.372,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/617">>: ok [ns_server:info,2014-08-19T16:49:31.374,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/616">>: ok [ns_server:info,2014-08-19T16:49:31.375,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/615">>: ok [ns_server:info,2014-08-19T16:49:31.377,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/614">>: ok [ns_server:info,2014-08-19T16:49:31.378,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/613">>: ok [ns_server:info,2014-08-19T16:49:31.379,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/612">>: ok [ns_server:info,2014-08-19T16:49:31.381,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/611">>: ok [ns_server:info,2014-08-19T16:49:31.383,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/610">>: ok [ns_server:info,2014-08-19T16:49:31.385,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/609">>: ok [ns_server:info,2014-08-19T16:49:31.387,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/608">>: ok [ns_server:info,2014-08-19T16:49:31.389,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/607">>: ok [ns_server:info,2014-08-19T16:49:31.390,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/606">>: ok [ns_server:info,2014-08-19T16:49:31.392,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/605">>: ok [ns_server:info,2014-08-19T16:49:31.394,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/604">>: ok [ns_server:info,2014-08-19T16:49:31.397,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/603">>: ok [ns_server:info,2014-08-19T16:49:31.399,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/602">>: ok [ns_server:info,2014-08-19T16:49:31.400,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/601">>: ok 
[ns_server:info,2014-08-19T16:49:31.402,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/600">>: ok [ns_server:info,2014-08-19T16:49:31.404,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/599">>: ok [ns_server:info,2014-08-19T16:49:31.406,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/598">>: ok [ns_server:info,2014-08-19T16:49:31.408,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/597">>: ok [ns_server:info,2014-08-19T16:49:31.410,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/596">>: ok [ns_server:info,2014-08-19T16:49:31.412,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/595">>: ok [ns_server:info,2014-08-19T16:49:31.414,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/594">>: ok [ns_server:info,2014-08-19T16:49:31.416,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/593">>: ok [ns_server:info,2014-08-19T16:49:31.419,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/592">>: ok [ns_server:info,2014-08-19T16:49:31.420,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/591">>: ok [ns_server:info,2014-08-19T16:49:31.422,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/590">>: ok [ns_server:info,2014-08-19T16:49:31.424,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/589">>: ok [ns_server:info,2014-08-19T16:49:31.426,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/588">>: ok [ns_server:info,2014-08-19T16:49:31.428,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/587">>: ok [ns_server:info,2014-08-19T16:49:31.430,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/586">>: ok [ns_server:info,2014-08-19T16:49:31.432,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/585">>: ok [ns_server:info,2014-08-19T16:49:31.434,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/584">>: ok [ns_server:info,2014-08-19T16:49:31.436,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/583">>: ok [ns_server:info,2014-08-19T16:49:31.437,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/582">>: ok [ns_server:info,2014-08-19T16:49:31.439,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/581">>: ok [ns_server:info,2014-08-19T16:49:31.441,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/580">>: ok [ns_server:info,2014-08-19T16:49:31.443,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/579">>: ok [ns_server:info,2014-08-19T16:49:31.444,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/578">>: ok 
[ns_server:info,2014-08-19T16:49:31.446,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/577">>: ok [ns_server:info,2014-08-19T16:49:31.448,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/576">>: ok [ns_server:info,2014-08-19T16:49:31.451,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/575">>: ok [ns_server:info,2014-08-19T16:49:31.453,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/574">>: ok [ns_server:info,2014-08-19T16:49:31.454,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/573">>: ok [ns_server:info,2014-08-19T16:49:31.456,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/572">>: ok [ns_server:info,2014-08-19T16:49:31.458,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/571">>: ok [ns_server:info,2014-08-19T16:49:31.460,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/570">>: ok [ns_server:info,2014-08-19T16:49:31.462,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/569">>: ok [ns_server:info,2014-08-19T16:49:31.463,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/568">>: ok [ns_server:info,2014-08-19T16:49:31.465,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/567">>: ok [ns_server:info,2014-08-19T16:49:31.467,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/566">>: ok [ns_server:info,2014-08-19T16:49:31.469,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/565">>: ok [ns_server:info,2014-08-19T16:49:31.470,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/564">>: ok [ns_server:info,2014-08-19T16:49:31.472,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/563">>: ok [ns_server:info,2014-08-19T16:49:31.474,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/562">>: ok [ns_server:info,2014-08-19T16:49:31.476,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/561">>: ok [ns_server:info,2014-08-19T16:49:31.477,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/560">>: ok [ns_server:info,2014-08-19T16:49:31.479,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/559">>: ok [ns_server:info,2014-08-19T16:49:31.480,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/558">>: ok [ns_server:info,2014-08-19T16:49:31.482,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/557">>: ok [ns_server:info,2014-08-19T16:49:31.484,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/556">>: ok [ns_server:info,2014-08-19T16:49:31.486,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/555">>: ok 
[ns_server:info,2014-08-19T16:49:31.488,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/554">>: ok [ns_server:info,2014-08-19T16:49:31.489,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/553">>: ok [ns_server:info,2014-08-19T16:49:31.491,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/552">>: ok [ns_server:info,2014-08-19T16:49:31.492,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/551">>: ok [ns_server:info,2014-08-19T16:49:31.494,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/550">>: ok [ns_server:info,2014-08-19T16:49:31.495,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/549">>: ok [ns_server:info,2014-08-19T16:49:31.497,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/548">>: ok [ns_server:info,2014-08-19T16:49:31.498,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/547">>: ok [ns_server:info,2014-08-19T16:49:31.499,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/546">>: ok [ns_server:info,2014-08-19T16:49:31.501,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/545">>: ok [ns_server:info,2014-08-19T16:49:31.502,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/544">>: ok [ns_server:info,2014-08-19T16:49:31.504,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/543">>: ok [ns_server:info,2014-08-19T16:49:31.506,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/542">>: ok [ns_server:info,2014-08-19T16:49:31.508,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/541">>: ok [ns_server:info,2014-08-19T16:49:31.510,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/540">>: ok [ns_server:info,2014-08-19T16:49:31.511,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/539">>: ok [ns_server:info,2014-08-19T16:49:31.513,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/538">>: ok [ns_server:info,2014-08-19T16:49:31.514,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/537">>: ok [ns_server:info,2014-08-19T16:49:31.516,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/536">>: ok [ns_server:info,2014-08-19T16:49:31.517,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/535">>: ok [ns_server:info,2014-08-19T16:49:31.519,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/534">>: ok [ns_server:info,2014-08-19T16:49:31.520,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/533">>: ok [ns_server:info,2014-08-19T16:49:31.522,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/532">>: ok 
[ns_server:info,2014-08-19T16:49:31.523,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/531">>: ok [ns_server:info,2014-08-19T16:49:31.525,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/530">>: ok [ns_server:info,2014-08-19T16:49:31.527,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/529">>: ok [ns_server:info,2014-08-19T16:49:31.528,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/528">>: ok [ns_server:info,2014-08-19T16:49:31.530,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/527">>: ok [ns_server:info,2014-08-19T16:49:31.531,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/526">>: ok [ns_server:info,2014-08-19T16:49:31.533,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/525">>: ok [ns_server:info,2014-08-19T16:49:31.534,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/524">>: ok [ns_server:info,2014-08-19T16:49:31.535,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/523">>: ok [ns_server:info,2014-08-19T16:49:31.536,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/522">>: ok [ns_server:info,2014-08-19T16:49:31.538,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/521">>: ok [ns_server:info,2014-08-19T16:49:31.539,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/520">>: ok [ns_server:info,2014-08-19T16:49:31.541,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/519">>: ok [ns_server:info,2014-08-19T16:49:31.543,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/518">>: ok [ns_server:info,2014-08-19T16:49:31.544,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/517">>: ok [ns_server:info,2014-08-19T16:49:31.546,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/516">>: ok [ns_server:info,2014-08-19T16:49:31.547,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/515">>: ok [ns_server:info,2014-08-19T16:49:31.549,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/514">>: ok [ns_server:info,2014-08-19T16:49:31.550,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/513">>: ok [ns_server:info,2014-08-19T16:49:31.552,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/512">>: ok [ns_server:info,2014-08-19T16:49:31.553,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/426">>: ok [ns_server:info,2014-08-19T16:49:31.554,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/425">>: ok [ns_server:info,2014-08-19T16:49:31.555,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/424">>: ok 
[ns_server:info,2014-08-19T16:49:31.556,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/423">>: ok [ns_server:info,2014-08-19T16:49:31.557,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/422">>: ok [ns_server:info,2014-08-19T16:49:31.558,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/421">>: ok [ns_server:info,2014-08-19T16:49:31.560,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/420">>: ok [ns_server:info,2014-08-19T16:49:31.561,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/419">>: ok [ns_server:info,2014-08-19T16:49:31.562,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/418">>: ok [ns_server:info,2014-08-19T16:49:31.564,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/417">>: ok [ns_server:info,2014-08-19T16:49:31.565,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/416">>: ok [ns_server:info,2014-08-19T16:49:31.566,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/415">>: ok [ns_server:info,2014-08-19T16:49:31.567,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/414">>: ok [ns_server:info,2014-08-19T16:49:31.569,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/413">>: ok [ns_server:info,2014-08-19T16:49:31.570,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/412">>: ok [ns_server:info,2014-08-19T16:49:31.571,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/411">>: ok [ns_server:info,2014-08-19T16:49:31.573,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/410">>: ok [ns_server:info,2014-08-19T16:49:31.574,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/409">>: ok [ns_server:info,2014-08-19T16:49:31.575,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/408">>: ok [ns_server:info,2014-08-19T16:49:31.576,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/407">>: ok [ns_server:info,2014-08-19T16:49:31.577,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/406">>: ok [ns_server:info,2014-08-19T16:49:31.578,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/405">>: ok [ns_server:info,2014-08-19T16:49:31.579,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/404">>: ok [ns_server:info,2014-08-19T16:49:31.580,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/403">>: ok [ns_server:info,2014-08-19T16:49:31.582,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/402">>: ok [ns_server:info,2014-08-19T16:49:31.583,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/401">>: ok 
[ns_server:info,2014-08-19T16:49:31.584,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/400">>: ok [ns_server:info,2014-08-19T16:49:31.585,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/399">>: ok [ns_server:info,2014-08-19T16:49:31.586,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/398">>: ok [ns_server:info,2014-08-19T16:49:31.587,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/397">>: ok [ns_server:info,2014-08-19T16:49:31.588,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/396">>: ok [ns_server:info,2014-08-19T16:49:31.590,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/395">>: ok [ns_server:info,2014-08-19T16:49:31.591,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/394">>: ok [ns_server:info,2014-08-19T16:49:31.593,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/393">>: ok [ns_server:info,2014-08-19T16:49:31.594,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/392">>: ok [ns_server:info,2014-08-19T16:49:31.595,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/391">>: ok [ns_server:info,2014-08-19T16:49:31.596,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/390">>: ok [ns_server:info,2014-08-19T16:49:31.597,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/389">>: ok [ns_server:info,2014-08-19T16:49:31.599,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/388">>: ok [ns_server:info,2014-08-19T16:49:31.600,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/387">>: ok [ns_server:info,2014-08-19T16:49:31.601,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/386">>: ok [ns_server:info,2014-08-19T16:49:31.603,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/385">>: ok [ns_server:info,2014-08-19T16:49:31.604,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/384">>: ok [ns_server:info,2014-08-19T16:49:31.605,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/383">>: ok [ns_server:info,2014-08-19T16:49:31.606,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/382">>: ok [ns_server:info,2014-08-19T16:49:31.607,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/381">>: ok [ns_server:info,2014-08-19T16:49:31.609,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/380">>: ok [ns_server:info,2014-08-19T16:49:31.610,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/379">>: ok [ns_server:info,2014-08-19T16:49:31.611,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/378">>: ok 
[ns_server:info,2014-08-19T16:49:31.611,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/377">>: ok [ns_server:info,2014-08-19T16:49:31.613,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/376">>: ok [ns_server:info,2014-08-19T16:49:31.614,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/375">>: ok [ns_server:info,2014-08-19T16:49:31.615,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/374">>: ok [ns_server:info,2014-08-19T16:49:31.616,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/373">>: ok [ns_server:info,2014-08-19T16:49:31.618,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/372">>: ok [ns_server:info,2014-08-19T16:49:31.619,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/371">>: ok [ns_server:info,2014-08-19T16:49:31.620,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/370">>: ok [ns_server:info,2014-08-19T16:49:31.621,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/369">>: ok [ns_server:info,2014-08-19T16:49:31.622,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/368">>: ok [ns_server:info,2014-08-19T16:49:31.623,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/367">>: ok [ns_server:info,2014-08-19T16:49:31.624,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/366">>: ok [ns_server:info,2014-08-19T16:49:31.625,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/365">>: ok [ns_server:info,2014-08-19T16:49:31.626,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/364">>: ok [ns_server:info,2014-08-19T16:49:31.628,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/363">>: ok [ns_server:info,2014-08-19T16:49:31.629,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/362">>: ok [ns_server:info,2014-08-19T16:49:31.630,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/361">>: ok [ns_server:info,2014-08-19T16:49:31.631,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/360">>: ok [ns_server:info,2014-08-19T16:49:31.632,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/359">>: ok [ns_server:info,2014-08-19T16:49:31.634,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/358">>: ok [ns_server:info,2014-08-19T16:49:31.635,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/357">>: ok [ns_server:info,2014-08-19T16:49:31.636,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/356">>: ok [ns_server:info,2014-08-19T16:49:31.637,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/355">>: ok 
[ns_server:info,2014-08-19T16:49:31.638,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/354">>: ok [ns_server:info,2014-08-19T16:49:31.639,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/353">>: ok [ns_server:info,2014-08-19T16:49:31.640,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/352">>: ok [ns_server:info,2014-08-19T16:49:31.641,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/351">>: ok [ns_server:info,2014-08-19T16:49:31.643,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/350">>: ok [ns_server:info,2014-08-19T16:49:31.644,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/349">>: ok [ns_server:info,2014-08-19T16:49:31.645,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/348">>: ok [ns_server:info,2014-08-19T16:49:31.646,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/347">>: ok [ns_server:info,2014-08-19T16:49:31.647,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/346">>: ok [ns_server:info,2014-08-19T16:49:31.648,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/345">>: ok [ns_server:info,2014-08-19T16:49:31.649,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/344">>: ok [ns_server:info,2014-08-19T16:49:31.651,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/343">>: ok [ns_server:info,2014-08-19T16:49:31.652,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/342">>: ok [ns_server:info,2014-08-19T16:49:31.653,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/170">>: ok [ns_server:info,2014-08-19T16:49:31.654,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/169">>: ok [ns_server:info,2014-08-19T16:49:31.655,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/168">>: ok [ns_server:info,2014-08-19T16:49:31.656,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/167">>: ok [ns_server:info,2014-08-19T16:49:31.657,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/166">>: ok [ns_server:info,2014-08-19T16:49:31.658,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/165">>: ok [ns_server:info,2014-08-19T16:49:31.659,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/164">>: ok [ns_server:info,2014-08-19T16:49:31.661,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/163">>: ok [ns_server:info,2014-08-19T16:49:31.662,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/162">>: ok [ns_server:info,2014-08-19T16:49:31.663,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/161">>: ok 
[ns_server:info,2014-08-19T16:49:31.664,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/160">>: ok [ns_server:info,2014-08-19T16:49:31.665,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/159">>: ok [ns_server:info,2014-08-19T16:49:31.666,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/158">>: ok [ns_server:info,2014-08-19T16:49:31.667,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/157">>: ok [ns_server:info,2014-08-19T16:49:31.668,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/156">>: ok [ns_server:info,2014-08-19T16:49:31.669,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/155">>: ok [ns_server:info,2014-08-19T16:49:31.670,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/154">>: ok [ns_server:info,2014-08-19T16:49:31.671,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/153">>: ok [ns_server:info,2014-08-19T16:49:31.672,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/152">>: ok [ns_server:info,2014-08-19T16:49:31.673,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/151">>: ok [ns_server:info,2014-08-19T16:49:31.674,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/150">>: ok [ns_server:info,2014-08-19T16:49:31.675,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/149">>: ok [ns_server:info,2014-08-19T16:49:31.676,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/148">>: ok [ns_server:info,2014-08-19T16:49:31.677,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/147">>: ok [ns_server:info,2014-08-19T16:49:31.677,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/146">>: ok [ns_server:info,2014-08-19T16:49:31.678,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/145">>: ok [ns_server:info,2014-08-19T16:49:31.679,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/144">>: ok [ns_server:info,2014-08-19T16:49:31.680,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/143">>: ok [ns_server:info,2014-08-19T16:49:31.681,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/142">>: ok [ns_server:info,2014-08-19T16:49:31.682,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/141">>: ok [ns_server:info,2014-08-19T16:49:31.683,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/140">>: ok [ns_server:info,2014-08-19T16:49:31.683,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/139">>: ok [ns_server:info,2014-08-19T16:49:31.684,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/138">>: ok 
[ns_server:info,2014-08-19T16:49:31.685,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/137">>: ok [ns_server:info,2014-08-19T16:49:31.686,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/136">>: ok [ns_server:info,2014-08-19T16:49:31.687,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/135">>: ok [ns_server:info,2014-08-19T16:49:31.688,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/134">>: ok [ns_server:info,2014-08-19T16:49:31.688,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/133">>: ok [ns_server:info,2014-08-19T16:49:31.689,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/132">>: ok [ns_server:info,2014-08-19T16:49:31.690,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/131">>: ok [ns_server:info,2014-08-19T16:49:31.691,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/130">>: ok [ns_server:info,2014-08-19T16:49:31.691,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/129">>: ok [ns_server:info,2014-08-19T16:49:31.692,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/128">>: ok [ns_server:info,2014-08-19T16:49:31.693,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/127">>: ok [ns_server:info,2014-08-19T16:49:31.694,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/126">>: ok [ns_server:info,2014-08-19T16:49:31.694,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/125">>: ok [ns_server:info,2014-08-19T16:49:31.695,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/124">>: ok [ns_server:info,2014-08-19T16:49:31.696,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/123">>: ok [ns_server:info,2014-08-19T16:49:31.697,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/122">>: ok [ns_server:info,2014-08-19T16:49:31.698,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/121">>: ok [ns_server:info,2014-08-19T16:49:31.699,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/120">>: ok [ns_server:info,2014-08-19T16:49:31.700,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/119">>: ok [ns_server:info,2014-08-19T16:49:31.701,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/118">>: ok [ns_server:info,2014-08-19T16:49:31.702,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/117">>: ok [ns_server:info,2014-08-19T16:49:31.702,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/116">>: ok [ns_server:info,2014-08-19T16:49:31.703,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/115">>: ok 
[ns_server:info,2014-08-19T16:49:31.704,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/114">>: ok [ns_server:info,2014-08-19T16:49:31.705,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/113">>: ok [ns_server:info,2014-08-19T16:49:31.706,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/112">>: ok [ns_server:info,2014-08-19T16:49:31.707,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/111">>: ok [ns_server:info,2014-08-19T16:49:31.707,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/110">>: ok [ns_server:info,2014-08-19T16:49:31.708,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/109">>: ok [ns_server:info,2014-08-19T16:49:31.709,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/108">>: ok [ns_server:info,2014-08-19T16:49:31.709,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/107">>: ok [ns_server:info,2014-08-19T16:49:31.710,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/106">>: ok [ns_server:info,2014-08-19T16:49:31.711,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/105">>: ok [ns_server:info,2014-08-19T16:49:31.711,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/104">>: ok [ns_server:info,2014-08-19T16:49:31.712,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/103">>: ok [ns_server:info,2014-08-19T16:49:31.713,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1023">>: ok [ns_server:info,2014-08-19T16:49:31.713,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1022">>: ok [ns_server:info,2014-08-19T16:49:31.714,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1021">>: ok [ns_server:info,2014-08-19T16:49:31.715,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1020">>: ok [ns_server:info,2014-08-19T16:49:31.716,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/102">>: ok [ns_server:info,2014-08-19T16:49:31.716,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1019">>: ok [ns_server:info,2014-08-19T16:49:31.717,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1018">>: ok [ns_server:info,2014-08-19T16:49:31.718,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1017">>: ok [ns_server:info,2014-08-19T16:49:31.718,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1016">>: ok [ns_server:info,2014-08-19T16:49:31.719,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1015">>: ok [ns_server:info,2014-08-19T16:49:31.720,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1014">>: ok 
[ns_server:info,2014-08-19T16:49:31.720,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1013">>: ok [ns_server:info,2014-08-19T16:49:31.721,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1012">>: ok [ns_server:info,2014-08-19T16:49:31.722,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1011">>: ok [ns_server:info,2014-08-19T16:49:31.722,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1010">>: ok [ns_server:info,2014-08-19T16:49:31.723,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/101">>: ok [ns_server:info,2014-08-19T16:49:31.724,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1009">>: ok [ns_server:info,2014-08-19T16:49:31.724,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1008">>: ok [ns_server:info,2014-08-19T16:49:31.725,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1007">>: ok [ns_server:info,2014-08-19T16:49:31.725,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1006">>: ok [ns_server:info,2014-08-19T16:49:31.726,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1005">>: ok [ns_server:info,2014-08-19T16:49:31.727,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1004">>: ok [ns_server:info,2014-08-19T16:49:31.727,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1003">>: ok [ns_server:info,2014-08-19T16:49:31.728,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1002">>: ok [ns_server:info,2014-08-19T16:49:31.728,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1001">>: ok [ns_server:info,2014-08-19T16:49:31.729,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1000">>: ok [ns_server:info,2014-08-19T16:49:31.730,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/100">>: ok
[ns_server:info,2014-08-19T16:49:31.730,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_databases_and_files:436]Couch dbs are deleted. Proceeding with bucket directory
[ns_server:debug,2014-08-19T16:49:31.730,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:do_delete_bucket_indexes:457]indexes directory doesn't exist already. fine.
[ns_server:info,2014-08-19T16:49:31.733,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/master">>: ok [ns_server:info,2014-08-19T16:49:31.736,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/999">>: ok [ns_server:info,2014-08-19T16:49:31.739,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/998">>: ok [ns_server:info,2014-08-19T16:49:31.741,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/997">>: ok [ns_server:info,2014-08-19T16:49:31.744,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/996">>: ok [ns_server:info,2014-08-19T16:49:31.746,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/995">>: ok [ns_server:info,2014-08-19T16:49:31.751,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/994">>: ok [ns_server:info,2014-08-19T16:49:31.753,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/993">>: ok [ns_server:info,2014-08-19T16:49:31.756,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/992">>: ok [ns_server:info,2014-08-19T16:49:31.758,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/991">>: ok [ns_server:info,2014-08-19T16:49:31.761,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/990">>: ok [ns_server:info,2014-08-19T16:49:31.763,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/99">>: ok [ns_server:info,2014-08-19T16:49:31.766,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/989">>: ok [ns_server:info,2014-08-19T16:49:31.769,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/988">>: ok [ns_server:info,2014-08-19T16:49:31.771,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/987">>: ok [ns_server:info,2014-08-19T16:49:31.773,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/986">>: ok [ns_server:info,2014-08-19T16:49:31.776,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/985">>: ok [ns_server:info,2014-08-19T16:49:31.778,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/984">>: ok [ns_server:info,2014-08-19T16:49:31.780,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/983">>: ok [ns_server:info,2014-08-19T16:49:31.782,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/982">>: ok [ns_server:info,2014-08-19T16:49:31.784,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/981">>: ok [ns_server:info,2014-08-19T16:49:31.786,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/980">>: ok [ns_server:info,2014-08-19T16:49:31.789,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/98">>: ok 
[ns_server:info,2014-08-19T16:49:31.791,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/979">>: ok [ns_server:info,2014-08-19T16:49:31.793,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/978">>: ok [ns_server:info,2014-08-19T16:49:31.795,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/977">>: ok [ns_server:info,2014-08-19T16:49:31.797,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/976">>: ok [ns_server:info,2014-08-19T16:49:31.799,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/975">>: ok [ns_server:info,2014-08-19T16:49:31.802,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/974">>: ok [ns_server:info,2014-08-19T16:49:31.804,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/973">>: ok [ns_server:info,2014-08-19T16:49:31.806,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/972">>: ok [ns_server:info,2014-08-19T16:49:31.808,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/971">>: ok [ns_server:info,2014-08-19T16:49:31.810,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/970">>: ok [ns_server:info,2014-08-19T16:49:31.812,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/97">>: ok [ns_server:info,2014-08-19T16:49:31.815,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/969">>: ok [ns_server:info,2014-08-19T16:49:31.817,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/968">>: ok [ns_server:info,2014-08-19T16:49:31.819,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/967">>: ok [ns_server:info,2014-08-19T16:49:31.821,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/966">>: ok [ns_server:info,2014-08-19T16:49:31.823,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/965">>: ok [ns_server:info,2014-08-19T16:49:31.825,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/964">>: ok [ns_server:info,2014-08-19T16:49:31.827,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/963">>: ok [ns_server:info,2014-08-19T16:49:31.829,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/962">>: ok [ns_server:info,2014-08-19T16:49:31.832,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/961">>: ok [ns_server:info,2014-08-19T16:49:31.834,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/960">>: ok [ns_server:info,2014-08-19T16:49:31.836,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/96">>: ok [ns_server:info,2014-08-19T16:49:31.838,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/959">>: ok 
[ns_server:info,2014-08-19T16:49:31.840,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/958">>: ok [ns_server:info,2014-08-19T16:49:31.842,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/957">>: ok [ns_server:info,2014-08-19T16:49:31.844,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/956">>: ok [ns_server:info,2014-08-19T16:49:31.846,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/955">>: ok [ns_server:info,2014-08-19T16:49:31.848,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/954">>: ok [ns_server:info,2014-08-19T16:49:31.850,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/953">>: ok [ns_server:info,2014-08-19T16:49:31.852,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/952">>: ok [ns_server:info,2014-08-19T16:49:31.855,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/951">>: ok [ns_server:info,2014-08-19T16:49:31.857,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/950">>: ok [ns_server:info,2014-08-19T16:49:31.858,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/95">>: ok [ns_server:info,2014-08-19T16:49:31.861,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/949">>: ok [ns_server:info,2014-08-19T16:49:31.862,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/948">>: ok [ns_server:info,2014-08-19T16:49:31.865,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/947">>: ok [ns_server:info,2014-08-19T16:49:31.868,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/946">>: ok [ns_server:info,2014-08-19T16:49:31.870,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/945">>: ok [ns_server:info,2014-08-19T16:49:31.872,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/944">>: ok [ns_server:info,2014-08-19T16:49:31.875,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/943">>: ok [ns_server:info,2014-08-19T16:49:31.877,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/942">>: ok [ns_server:info,2014-08-19T16:49:31.879,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/941">>: ok [ns_server:info,2014-08-19T16:49:31.881,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/940">>: ok [ns_server:info,2014-08-19T16:49:31.883,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/94">>: ok [ns_server:info,2014-08-19T16:49:31.885,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/939">>: ok [ns_server:info,2014-08-19T16:49:31.887,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/938">>: ok 
[ns_server:info,2014-08-19T16:49:31.889,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/93">>: ok [ns_server:info,2014-08-19T16:49:31.891,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/92">>: ok [ns_server:info,2014-08-19T16:49:31.894,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/91">>: ok [ns_server:info,2014-08-19T16:49:31.895,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/90">>: ok [ns_server:info,2014-08-19T16:49:31.897,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/89">>: ok [ns_server:info,2014-08-19T16:49:31.900,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/88">>: ok [ns_server:info,2014-08-19T16:49:31.903,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/87">>: ok [ns_server:info,2014-08-19T16:49:31.905,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/86">>: ok [ns_server:info,2014-08-19T16:49:31.907,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/767">>: ok [ns_server:info,2014-08-19T16:49:31.909,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/766">>: ok [ns_server:info,2014-08-19T16:49:31.911,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/765">>: ok [ns_server:info,2014-08-19T16:49:31.914,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/764">>: ok [ns_server:info,2014-08-19T16:49:31.916,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/763">>: ok [ns_server:info,2014-08-19T16:49:31.917,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/762">>: ok [ns_server:info,2014-08-19T16:49:31.920,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/761">>: ok [ns_server:info,2014-08-19T16:49:31.922,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/760">>: ok [ns_server:info,2014-08-19T16:49:31.925,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/759">>: ok [ns_server:info,2014-08-19T16:49:31.927,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/758">>: ok [ns_server:info,2014-08-19T16:49:31.929,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/757">>: ok [ns_server:info,2014-08-19T16:49:31.932,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/756">>: ok [ns_server:info,2014-08-19T16:49:31.936,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/755">>: ok [ns_server:info,2014-08-19T16:49:31.938,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/754">>: ok [ns_server:info,2014-08-19T16:49:31.940,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/753">>: ok 
[ns_server:info,2014-08-19T16:49:31.943,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/752">>: ok [ns_server:info,2014-08-19T16:49:31.945,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/751">>: ok [ns_server:info,2014-08-19T16:49:31.948,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/750">>: ok [ns_server:info,2014-08-19T16:49:31.950,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/749">>: ok [ns_server:info,2014-08-19T16:49:31.952,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/748">>: ok [ns_server:info,2014-08-19T16:49:31.954,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/747">>: ok [ns_server:info,2014-08-19T16:49:31.956,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/746">>: ok [ns_server:info,2014-08-19T16:49:31.958,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/745">>: ok [ns_server:info,2014-08-19T16:49:31.960,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/744">>: ok [ns_server:info,2014-08-19T16:49:31.963,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/743">>: ok [ns_server:info,2014-08-19T16:49:31.965,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/742">>: ok [ns_server:info,2014-08-19T16:49:31.968,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/741">>: ok [ns_server:info,2014-08-19T16:49:31.969,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/740">>: ok [ns_server:info,2014-08-19T16:49:31.971,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/739">>: ok [ns_server:info,2014-08-19T16:49:31.973,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/738">>: ok [ns_server:info,2014-08-19T16:49:31.975,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/737">>: ok [ns_server:info,2014-08-19T16:49:31.977,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/736">>: ok [ns_server:info,2014-08-19T16:49:31.978,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/735">>: ok [ns_server:info,2014-08-19T16:49:31.980,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/734">>: ok [ns_server:info,2014-08-19T16:49:31.982,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/733">>: ok [ns_server:info,2014-08-19T16:49:31.984,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/732">>: ok [ns_server:info,2014-08-19T16:49:31.985,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/731">>: ok [ns_server:info,2014-08-19T16:49:31.987,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/730">>: ok 
[ns_server:info,2014-08-19T16:49:31.989,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/729">>: ok [ns_server:info,2014-08-19T16:49:31.991,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/728">>: ok [ns_server:info,2014-08-19T16:49:31.993,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/727">>: ok [ns_server:info,2014-08-19T16:49:31.995,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/726">>: ok [ns_server:info,2014-08-19T16:49:31.997,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/725">>: ok [ns_server:info,2014-08-19T16:49:31.999,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/724">>: ok [ns_server:info,2014-08-19T16:49:32.001,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/723">>: ok [ns_server:info,2014-08-19T16:49:32.003,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/722">>: ok [ns_server:info,2014-08-19T16:49:32.005,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/721">>: ok [ns_server:info,2014-08-19T16:49:32.008,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/720">>: ok [ns_server:info,2014-08-19T16:49:32.010,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/719">>: ok [ns_server:info,2014-08-19T16:49:32.012,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/718">>: ok [ns_server:info,2014-08-19T16:49:32.014,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/717">>: ok [ns_server:info,2014-08-19T16:49:32.017,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/716">>: ok [ns_server:info,2014-08-19T16:49:32.019,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/715">>: ok [ns_server:info,2014-08-19T16:49:32.021,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/714">>: ok [ns_server:info,2014-08-19T16:49:32.023,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/713">>: ok [ns_server:info,2014-08-19T16:49:32.026,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/712">>: ok [ns_server:info,2014-08-19T16:49:32.028,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/711">>: ok [ns_server:info,2014-08-19T16:49:32.030,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/710">>: ok [ns_server:info,2014-08-19T16:49:32.032,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/709">>: ok [ns_server:info,2014-08-19T16:49:32.034,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/708">>: ok [ns_server:info,2014-08-19T16:49:32.036,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/707">>: ok 
[ns_server:info,2014-08-19T16:49:32.038,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/706">>: ok [ns_server:info,2014-08-19T16:49:32.040,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/705">>: ok [ns_server:info,2014-08-19T16:49:32.042,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/704">>: ok [ns_server:info,2014-08-19T16:49:32.044,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/703">>: ok [ns_server:info,2014-08-19T16:49:32.046,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/702">>: ok [ns_server:info,2014-08-19T16:49:32.048,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/701">>: ok [ns_server:info,2014-08-19T16:49:32.050,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/700">>: ok [ns_server:info,2014-08-19T16:49:32.051,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/699">>: ok [ns_server:info,2014-08-19T16:49:32.053,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/698">>: ok [ns_server:info,2014-08-19T16:49:32.055,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/697">>: ok [ns_server:info,2014-08-19T16:49:32.057,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/696">>: ok [ns_server:info,2014-08-19T16:49:32.059,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/695">>: ok [ns_server:info,2014-08-19T16:49:32.061,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/694">>: ok [ns_server:info,2014-08-19T16:49:32.063,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/693">>: ok [ns_server:info,2014-08-19T16:49:32.065,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/692">>: ok [ns_server:info,2014-08-19T16:49:32.067,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/691">>: ok [ns_server:info,2014-08-19T16:49:32.069,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/690">>: ok [ns_server:info,2014-08-19T16:49:32.070,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/689">>: ok [ns_server:info,2014-08-19T16:49:32.072,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/688">>: ok [ns_server:info,2014-08-19T16:49:32.074,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/687">>: ok [ns_server:info,2014-08-19T16:49:32.076,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/686">>: ok [ns_server:info,2014-08-19T16:49:32.078,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/685">>: ok [ns_server:info,2014-08-19T16:49:32.079,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/684">>: ok 
[ns_server:info,2014-08-19T16:49:32.081,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/683">>: ok [ns_server:info,2014-08-19T16:49:32.084,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/682">>: ok [ns_server:info,2014-08-19T16:49:32.086,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/681">>: ok [ns_server:info,2014-08-19T16:49:32.088,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/680">>: ok [ns_server:info,2014-08-19T16:49:32.091,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/679">>: ok [ns_server:info,2014-08-19T16:49:32.093,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/678">>: ok [ns_server:info,2014-08-19T16:49:32.096,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/677">>: ok [ns_server:info,2014-08-19T16:49:32.098,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/676">>: ok [ns_server:info,2014-08-19T16:49:32.100,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/675">>: ok [ns_server:info,2014-08-19T16:49:32.102,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/674">>: ok [ns_server:info,2014-08-19T16:49:32.104,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/673">>: ok [ns_server:info,2014-08-19T16:49:32.106,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/672">>: ok [ns_server:info,2014-08-19T16:49:32.109,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/671">>: ok [ns_server:info,2014-08-19T16:49:32.111,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/670">>: ok [ns_server:info,2014-08-19T16:49:32.113,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/669">>: ok [ns_server:info,2014-08-19T16:49:32.115,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/668">>: ok [ns_server:info,2014-08-19T16:49:32.118,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/667">>: ok [ns_server:info,2014-08-19T16:49:32.120,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/666">>: ok [ns_server:info,2014-08-19T16:49:32.122,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/665">>: ok [ns_server:info,2014-08-19T16:49:32.124,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/664">>: ok [ns_server:info,2014-08-19T16:49:32.126,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/663">>: ok [ns_server:info,2014-08-19T16:49:32.128,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/662">>: ok [ns_server:info,2014-08-19T16:49:32.131,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/661">>: ok 
[ns_server:info,2014-08-19T16:49:32.133,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/660">>: ok [ns_server:info,2014-08-19T16:49:32.135,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/659">>: ok [ns_server:info,2014-08-19T16:49:32.137,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/658">>: ok [ns_server:info,2014-08-19T16:49:32.139,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/657">>: ok [ns_server:info,2014-08-19T16:49:32.141,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/656">>: ok [ns_server:info,2014-08-19T16:49:32.143,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/655">>: ok [ns_server:info,2014-08-19T16:49:32.146,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/654">>: ok [ns_server:info,2014-08-19T16:49:32.147,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/653">>: ok [ns_server:info,2014-08-19T16:49:32.149,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/652">>: ok [ns_server:info,2014-08-19T16:49:32.151,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/651">>: ok [ns_server:info,2014-08-19T16:49:32.154,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/650">>: ok [ns_server:info,2014-08-19T16:49:32.156,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/649">>: ok [ns_server:info,2014-08-19T16:49:32.158,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/648">>: ok [ns_server:info,2014-08-19T16:49:32.160,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/647">>: ok [ns_server:info,2014-08-19T16:49:32.162,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/646">>: ok [ns_server:info,2014-08-19T16:49:32.164,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/645">>: ok [ns_server:info,2014-08-19T16:49:32.166,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/644">>: ok [ns_server:info,2014-08-19T16:49:32.168,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/643">>: ok [ns_server:info,2014-08-19T16:49:32.170,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/642">>: ok [ns_server:info,2014-08-19T16:49:32.173,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/641">>: ok [ns_server:info,2014-08-19T16:49:32.174,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/640">>: ok [ns_server:info,2014-08-19T16:49:32.176,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/639">>: ok [ns_server:info,2014-08-19T16:49:32.179,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/638">>: ok 
[ns_server:info,2014-08-19T16:49:32.180,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/637">>: ok [ns_server:info,2014-08-19T16:49:32.182,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/636">>: ok [ns_server:info,2014-08-19T16:49:32.185,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/635">>: ok [ns_server:info,2014-08-19T16:49:32.186,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/634">>: ok [ns_server:info,2014-08-19T16:49:32.188,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/633">>: ok [ns_server:info,2014-08-19T16:49:32.190,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/632">>: ok [ns_server:info,2014-08-19T16:49:32.192,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/631">>: ok [ns_server:info,2014-08-19T16:49:32.194,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/630">>: ok [ns_server:info,2014-08-19T16:49:32.196,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/629">>: ok [ns_server:info,2014-08-19T16:49:32.198,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/628">>: ok [ns_server:info,2014-08-19T16:49:32.200,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/627">>: ok [ns_server:info,2014-08-19T16:49:32.202,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/626">>: ok [ns_server:info,2014-08-19T16:49:32.204,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/625">>: ok [ns_server:info,2014-08-19T16:49:32.206,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/624">>: ok [ns_server:info,2014-08-19T16:49:32.208,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/623">>: ok [ns_server:info,2014-08-19T16:49:32.210,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/622">>: ok [ns_server:info,2014-08-19T16:49:32.211,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/621">>: ok [ns_server:info,2014-08-19T16:49:32.213,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/620">>: ok [ns_server:info,2014-08-19T16:49:32.215,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/619">>: ok [ns_server:info,2014-08-19T16:49:32.217,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/618">>: ok [ns_server:info,2014-08-19T16:49:32.219,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/617">>: ok [ns_server:info,2014-08-19T16:49:32.220,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/616">>: ok [ns_server:info,2014-08-19T16:49:32.223,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/615">>: ok 
[ns_server:info,2014-08-19T16:49:32.225,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/614">>: ok [ns_server:info,2014-08-19T16:49:32.227,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/613">>: ok [ns_server:info,2014-08-19T16:49:32.229,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/612">>: ok [ns_server:info,2014-08-19T16:49:32.231,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/611">>: ok [ns_server:info,2014-08-19T16:49:32.233,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/610">>: ok [ns_server:info,2014-08-19T16:49:32.235,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/609">>: ok [ns_server:info,2014-08-19T16:49:32.237,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/608">>: ok [ns_server:info,2014-08-19T16:49:32.239,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/607">>: ok [ns_server:info,2014-08-19T16:49:32.241,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/606">>: ok [ns_server:info,2014-08-19T16:49:32.243,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/605">>: ok [ns_server:info,2014-08-19T16:49:32.244,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/604">>: ok [ns_server:info,2014-08-19T16:49:32.246,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/603">>: ok [ns_server:info,2014-08-19T16:49:32.248,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/602">>: ok [ns_server:info,2014-08-19T16:49:32.250,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/601">>: ok [ns_server:info,2014-08-19T16:49:32.252,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/600">>: ok [ns_server:info,2014-08-19T16:49:32.254,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/599">>: ok [ns_server:info,2014-08-19T16:49:32.255,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/598">>: ok [ns_server:info,2014-08-19T16:49:32.257,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/597">>: ok [ns_server:info,2014-08-19T16:49:32.258,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/596">>: ok [ns_server:info,2014-08-19T16:49:32.260,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/595">>: ok [ns_server:info,2014-08-19T16:49:32.261,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/594">>: ok [ns_server:info,2014-08-19T16:49:32.262,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/593">>: ok [ns_server:info,2014-08-19T16:49:32.264,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/592">>: ok 
[ns_server:info,2014-08-19T16:49:32.266,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/591">>: ok [ns_server:info,2014-08-19T16:49:32.267,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/590">>: ok [ns_server:info,2014-08-19T16:49:32.269,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/589">>: ok [ns_server:info,2014-08-19T16:49:32.271,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/588">>: ok [ns_server:info,2014-08-19T16:49:32.272,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/587">>: ok [ns_server:info,2014-08-19T16:49:32.273,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/586">>: ok [ns_server:info,2014-08-19T16:49:32.275,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/585">>: ok [ns_server:info,2014-08-19T16:49:32.277,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/584">>: ok [ns_server:info,2014-08-19T16:49:32.278,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/583">>: ok [ns_server:info,2014-08-19T16:49:32.280,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/582">>: ok [ns_server:info,2014-08-19T16:49:32.281,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/581">>: ok [ns_server:info,2014-08-19T16:49:32.282,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/580">>: ok [ns_server:info,2014-08-19T16:49:32.284,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/579">>: ok [ns_server:info,2014-08-19T16:49:32.285,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/578">>: ok [ns_server:info,2014-08-19T16:49:32.287,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/577">>: ok [ns_server:info,2014-08-19T16:49:32.289,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/576">>: ok [ns_server:info,2014-08-19T16:49:32.291,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/575">>: ok [ns_server:info,2014-08-19T16:49:32.292,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/574">>: ok [ns_server:info,2014-08-19T16:49:32.294,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/573">>: ok [ns_server:info,2014-08-19T16:49:32.295,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/572">>: ok [ns_server:info,2014-08-19T16:49:32.297,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/571">>: ok [ns_server:info,2014-08-19T16:49:32.299,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/570">>: ok [ns_server:info,2014-08-19T16:49:32.300,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/569">>: ok 
[ns_server:info,2014-08-19T16:49:32.302,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/568">>: ok [ns_server:info,2014-08-19T16:49:32.303,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/567">>: ok [ns_server:info,2014-08-19T16:49:32.305,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/566">>: ok [ns_server:info,2014-08-19T16:49:32.306,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/565">>: ok [ns_server:info,2014-08-19T16:49:32.308,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/564">>: ok [ns_server:info,2014-08-19T16:49:32.309,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/563">>: ok [ns_server:info,2014-08-19T16:49:32.310,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/562">>: ok [ns_server:info,2014-08-19T16:49:32.311,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/561">>: ok [ns_server:info,2014-08-19T16:49:32.313,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/560">>: ok [ns_server:info,2014-08-19T16:49:32.314,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/559">>: ok [ns_server:info,2014-08-19T16:49:32.316,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/558">>: ok [ns_server:info,2014-08-19T16:49:32.318,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/557">>: ok [ns_server:info,2014-08-19T16:49:32.319,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/556">>: ok [ns_server:info,2014-08-19T16:49:32.321,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/555">>: ok [ns_server:info,2014-08-19T16:49:32.322,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/554">>: ok [ns_server:info,2014-08-19T16:49:32.324,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/553">>: ok [ns_server:info,2014-08-19T16:49:32.326,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/552">>: ok [ns_server:info,2014-08-19T16:49:32.327,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/551">>: ok [ns_server:info,2014-08-19T16:49:32.329,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/550">>: ok [ns_server:info,2014-08-19T16:49:32.331,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/549">>: ok [ns_server:info,2014-08-19T16:49:32.332,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/548">>: ok [ns_server:info,2014-08-19T16:49:32.334,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/547">>: ok [ns_server:info,2014-08-19T16:49:32.336,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/546">>: ok 
[ns_server:info,2014-08-19T16:49:32.337,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/545">>: ok [ns_server:info,2014-08-19T16:49:32.339,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/544">>: ok [ns_server:info,2014-08-19T16:49:32.341,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/543">>: ok [ns_server:info,2014-08-19T16:49:32.342,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/542">>: ok [ns_server:info,2014-08-19T16:49:32.344,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/541">>: ok [ns_server:info,2014-08-19T16:49:32.345,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/540">>: ok [ns_server:info,2014-08-19T16:49:32.347,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/539">>: ok [ns_server:info,2014-08-19T16:49:32.349,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/538">>: ok [ns_server:info,2014-08-19T16:49:32.350,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/537">>: ok [ns_server:info,2014-08-19T16:49:32.352,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/536">>: ok [ns_server:info,2014-08-19T16:49:32.354,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/535">>: ok [ns_server:info,2014-08-19T16:49:32.355,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/534">>: ok [ns_server:info,2014-08-19T16:49:32.357,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/533">>: ok [ns_server:info,2014-08-19T16:49:32.358,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/532">>: ok [ns_server:info,2014-08-19T16:49:32.360,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/531">>: ok [ns_server:info,2014-08-19T16:49:32.361,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/530">>: ok [ns_server:info,2014-08-19T16:49:32.363,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/529">>: ok [ns_server:info,2014-08-19T16:49:32.364,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/528">>: ok [ns_server:info,2014-08-19T16:49:32.366,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/527">>: ok [ns_server:info,2014-08-19T16:49:32.367,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/526">>: ok [ns_server:info,2014-08-19T16:49:32.369,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/525">>: ok [ns_server:info,2014-08-19T16:49:32.371,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/524">>: ok [ns_server:info,2014-08-19T16:49:32.372,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/523">>: ok 
[ns_server:info,2014-08-19T16:49:32.374,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/522">>: ok [ns_server:info,2014-08-19T16:49:32.375,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/521">>: ok [ns_server:info,2014-08-19T16:49:32.377,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/520">>: ok [ns_server:info,2014-08-19T16:49:32.378,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/519">>: ok [ns_server:info,2014-08-19T16:49:32.380,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/518">>: ok [ns_server:info,2014-08-19T16:49:32.381,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/517">>: ok [ns_server:info,2014-08-19T16:49:32.383,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/516">>: ok [ns_server:info,2014-08-19T16:49:32.384,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/515">>: ok [ns_server:info,2014-08-19T16:49:32.386,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/514">>: ok [ns_server:info,2014-08-19T16:49:32.387,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/513">>: ok [ns_server:info,2014-08-19T16:49:32.389,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/512">>: ok [ns_server:info,2014-08-19T16:49:32.390,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/426">>: ok [ns_server:info,2014-08-19T16:49:32.391,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/425">>: ok [ns_server:info,2014-08-19T16:49:32.393,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/424">>: ok [ns_server:info,2014-08-19T16:49:32.394,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/423">>: ok [ns_server:info,2014-08-19T16:49:32.396,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/422">>: ok [ns_server:info,2014-08-19T16:49:32.397,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/421">>: ok [ns_server:info,2014-08-19T16:49:32.399,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/420">>: ok [ns_server:info,2014-08-19T16:49:32.400,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/419">>: ok [ns_server:info,2014-08-19T16:49:32.402,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/418">>: ok [ns_server:info,2014-08-19T16:49:32.403,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/417">>: ok [ns_server:info,2014-08-19T16:49:32.404,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/416">>: ok [ns_server:info,2014-08-19T16:49:32.406,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/415">>: ok 
[ns_server:info,2014-08-19T16:49:32.407,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/414">>: ok [ns_server:info,2014-08-19T16:49:32.408,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/413">>: ok [ns_server:info,2014-08-19T16:49:32.409,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/412">>: ok [ns_server:info,2014-08-19T16:49:32.411,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/411">>: ok [ns_server:info,2014-08-19T16:49:32.412,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/410">>: ok [ns_server:info,2014-08-19T16:49:32.413,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/409">>: ok [ns_server:info,2014-08-19T16:49:32.414,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/408">>: ok [ns_server:info,2014-08-19T16:49:32.416,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/407">>: ok [ns_server:info,2014-08-19T16:49:32.417,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/406">>: ok [ns_server:info,2014-08-19T16:49:32.419,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/405">>: ok [ns_server:info,2014-08-19T16:49:32.420,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/404">>: ok [ns_server:info,2014-08-19T16:49:32.421,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/403">>: ok [ns_server:info,2014-08-19T16:49:32.423,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/402">>: ok [ns_server:info,2014-08-19T16:49:32.424,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/401">>: ok [ns_server:info,2014-08-19T16:49:32.425,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/400">>: ok [ns_server:info,2014-08-19T16:49:32.427,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/399">>: ok [ns_server:info,2014-08-19T16:49:32.428,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/398">>: ok [ns_server:info,2014-08-19T16:49:32.430,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/397">>: ok [ns_server:info,2014-08-19T16:49:32.431,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/396">>: ok [ns_server:info,2014-08-19T16:49:32.432,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/395">>: ok [ns_server:info,2014-08-19T16:49:32.434,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/394">>: ok [ns_server:info,2014-08-19T16:49:32.435,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/393">>: ok [ns_server:info,2014-08-19T16:49:32.436,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/392">>: ok 
[ns_server:info,2014-08-19T16:49:32.438,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/391">>: ok [ns_server:info,2014-08-19T16:49:32.439,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/390">>: ok [ns_server:info,2014-08-19T16:49:32.440,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/389">>: ok [ns_server:info,2014-08-19T16:49:32.441,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/388">>: ok [ns_server:info,2014-08-19T16:49:32.443,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/387">>: ok [ns_server:info,2014-08-19T16:49:32.444,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/386">>: ok [ns_server:info,2014-08-19T16:49:32.445,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/385">>: ok [ns_server:info,2014-08-19T16:49:32.446,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/384">>: ok [ns_server:info,2014-08-19T16:49:32.447,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/383">>: ok [ns_server:info,2014-08-19T16:49:32.449,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/382">>: ok [ns_server:info,2014-08-19T16:49:32.450,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/381">>: ok [ns_server:info,2014-08-19T16:49:32.451,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/380">>: ok [ns_server:info,2014-08-19T16:49:32.453,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/379">>: ok [ns_server:info,2014-08-19T16:49:32.454,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/378">>: ok [ns_server:info,2014-08-19T16:49:32.455,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/377">>: ok [ns_server:info,2014-08-19T16:49:32.457,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/376">>: ok [ns_server:info,2014-08-19T16:49:32.458,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/375">>: ok [ns_server:info,2014-08-19T16:49:32.459,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/374">>: ok [ns_server:info,2014-08-19T16:49:32.460,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/373">>: ok [ns_server:info,2014-08-19T16:49:32.462,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/372">>: ok [ns_server:info,2014-08-19T16:49:32.463,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/371">>: ok [ns_server:info,2014-08-19T16:49:32.464,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/370">>: ok [ns_server:info,2014-08-19T16:49:32.465,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/369">>: ok 
[ns_server:info,2014-08-19T16:49:32.466,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/368">>: ok [ns_server:info,2014-08-19T16:49:32.468,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/367">>: ok [ns_server:info,2014-08-19T16:49:32.469,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/366">>: ok [ns_server:info,2014-08-19T16:49:32.470,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/365">>: ok [ns_server:info,2014-08-19T16:49:32.472,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/364">>: ok [ns_server:info,2014-08-19T16:49:32.473,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/363">>: ok [ns_server:info,2014-08-19T16:49:32.474,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/362">>: ok [ns_server:info,2014-08-19T16:49:32.475,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/361">>: ok [ns_server:info,2014-08-19T16:49:32.477,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/360">>: ok [ns_server:info,2014-08-19T16:49:32.478,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/359">>: ok [ns_server:info,2014-08-19T16:49:32.479,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/358">>: ok [ns_server:info,2014-08-19T16:49:32.480,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/357">>: ok [ns_server:info,2014-08-19T16:49:32.481,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/356">>: ok [ns_server:info,2014-08-19T16:49:32.483,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/355">>: ok [ns_server:info,2014-08-19T16:49:32.484,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/354">>: ok [ns_server:info,2014-08-19T16:49:32.485,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/353">>: ok [ns_server:info,2014-08-19T16:49:32.486,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/352">>: ok [ns_server:info,2014-08-19T16:49:32.487,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/351">>: ok [ns_server:info,2014-08-19T16:49:32.488,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/350">>: ok [ns_server:info,2014-08-19T16:49:32.489,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/349">>: ok [ns_server:info,2014-08-19T16:49:32.490,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/348">>: ok [ns_server:info,2014-08-19T16:49:32.492,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/347">>: ok [ns_server:info,2014-08-19T16:49:32.493,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/346">>: ok 
[ns_server:info,2014-08-19T16:49:32.494,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/345">>: ok [ns_server:info,2014-08-19T16:49:32.495,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/344">>: ok [ns_server:info,2014-08-19T16:49:32.496,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/343">>: ok [ns_server:info,2014-08-19T16:49:32.497,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/342">>: ok [ns_server:info,2014-08-19T16:49:32.498,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/170">>: ok [ns_server:info,2014-08-19T16:49:32.500,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/169">>: ok [ns_server:info,2014-08-19T16:49:32.501,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/168">>: ok [ns_server:info,2014-08-19T16:49:32.502,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/167">>: ok [ns_server:info,2014-08-19T16:49:32.503,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/166">>: ok [ns_server:info,2014-08-19T16:49:32.504,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/165">>: ok [ns_server:info,2014-08-19T16:49:32.505,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/164">>: ok [ns_server:info,2014-08-19T16:49:32.506,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/163">>: ok [ns_server:info,2014-08-19T16:49:32.507,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/162">>: ok [ns_server:info,2014-08-19T16:49:32.508,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/161">>: ok [ns_server:info,2014-08-19T16:49:32.509,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/160">>: ok [ns_server:info,2014-08-19T16:49:32.510,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/159">>: ok [ns_server:info,2014-08-19T16:49:32.511,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/158">>: ok [ns_server:info,2014-08-19T16:49:32.512,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/157">>: ok [ns_server:info,2014-08-19T16:49:32.513,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/156">>: ok [ns_server:info,2014-08-19T16:49:32.514,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/155">>: ok [ns_server:info,2014-08-19T16:49:32.515,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/154">>: ok [ns_server:info,2014-08-19T16:49:32.516,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/153">>: ok [ns_server:info,2014-08-19T16:49:32.517,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/152">>: ok 
[ns_server:info,2014-08-19T16:49:32.518,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/151">>: ok [ns_server:info,2014-08-19T16:49:32.519,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/150">>: ok [ns_server:info,2014-08-19T16:49:32.520,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/149">>: ok [ns_server:info,2014-08-19T16:49:32.521,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/148">>: ok [ns_server:info,2014-08-19T16:49:32.522,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/147">>: ok [ns_server:info,2014-08-19T16:49:32.524,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/146">>: ok [ns_server:info,2014-08-19T16:49:32.525,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/145">>: ok [ns_server:info,2014-08-19T16:49:32.525,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/144">>: ok [ns_server:info,2014-08-19T16:49:32.526,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/143">>: ok [ns_server:info,2014-08-19T16:49:32.527,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/142">>: ok [ns_server:info,2014-08-19T16:49:32.528,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/141">>: ok [ns_server:info,2014-08-19T16:49:32.529,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/140">>: ok [ns_server:info,2014-08-19T16:49:32.530,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/139">>: ok [ns_server:info,2014-08-19T16:49:32.531,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/138">>: ok [ns_server:info,2014-08-19T16:49:32.532,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/137">>: ok [ns_server:info,2014-08-19T16:49:32.533,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/136">>: ok [ns_server:info,2014-08-19T16:49:32.534,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/135">>: ok [ns_server:info,2014-08-19T16:49:32.535,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/134">>: ok [ns_server:info,2014-08-19T16:49:32.536,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/133">>: ok [ns_server:info,2014-08-19T16:49:32.537,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/132">>: ok [ns_server:info,2014-08-19T16:49:32.538,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/131">>: ok [ns_server:info,2014-08-19T16:49:32.539,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/130">>: ok [ns_server:info,2014-08-19T16:49:32.540,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/129">>: ok 
[ns_server:info,2014-08-19T16:49:32.541,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/128">>: ok [ns_server:info,2014-08-19T16:49:32.542,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/127">>: ok [ns_server:info,2014-08-19T16:49:32.543,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/126">>: ok [ns_server:info,2014-08-19T16:49:32.544,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/125">>: ok [ns_server:info,2014-08-19T16:49:32.544,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/124">>: ok [ns_server:info,2014-08-19T16:49:32.546,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/123">>: ok [ns_server:info,2014-08-19T16:49:32.546,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/122">>: ok [ns_server:info,2014-08-19T16:49:32.547,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/121">>: ok [ns_server:info,2014-08-19T16:49:32.548,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/120">>: ok [ns_server:info,2014-08-19T16:49:32.549,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/119">>: ok [ns_server:info,2014-08-19T16:49:32.550,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/118">>: ok [ns_server:info,2014-08-19T16:49:32.551,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/117">>: ok [ns_server:info,2014-08-19T16:49:32.552,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/116">>: ok [ns_server:info,2014-08-19T16:49:32.553,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/115">>: ok [ns_server:info,2014-08-19T16:49:32.553,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/114">>: ok [ns_server:info,2014-08-19T16:49:32.554,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/113">>: ok [ns_server:info,2014-08-19T16:49:32.555,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/112">>: ok [ns_server:info,2014-08-19T16:49:32.556,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/111">>: ok [ns_server:info,2014-08-19T16:49:32.557,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/110">>: ok [ns_server:info,2014-08-19T16:49:32.557,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/109">>: ok [ns_server:info,2014-08-19T16:49:32.558,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/108">>: ok [ns_server:info,2014-08-19T16:49:32.559,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/107">>: ok [ns_server:info,2014-08-19T16:49:32.560,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/106">>: ok 
[ns_server:info,2014-08-19T16:49:32.561,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/105">>: ok [ns_server:info,2014-08-19T16:49:32.562,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/104">>: ok [ns_server:info,2014-08-19T16:49:32.562,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/103">>: ok [ns_server:info,2014-08-19T16:49:32.563,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1023">>: ok [ns_server:info,2014-08-19T16:49:32.564,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1022">>: ok [ns_server:info,2014-08-19T16:49:32.565,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1021">>: ok [ns_server:info,2014-08-19T16:49:32.566,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1020">>: ok [ns_server:info,2014-08-19T16:49:32.566,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/102">>: ok [ns_server:info,2014-08-19T16:49:32.567,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1019">>: ok [ns_server:info,2014-08-19T16:49:32.568,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1018">>: ok [ns_server:info,2014-08-19T16:49:32.568,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1017">>: ok [ns_server:info,2014-08-19T16:49:32.569,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1016">>: ok [ns_server:info,2014-08-19T16:49:32.570,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1015">>: ok [ns_server:info,2014-08-19T16:49:32.570,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1014">>: ok [ns_server:info,2014-08-19T16:49:32.571,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1013">>: ok [ns_server:info,2014-08-19T16:49:32.572,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1012">>: ok [ns_server:info,2014-08-19T16:49:32.573,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1011">>: ok [ns_server:info,2014-08-19T16:49:32.573,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1010">>: ok [ns_server:info,2014-08-19T16:49:32.574,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/101">>: ok [ns_server:info,2014-08-19T16:49:32.575,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1009">>: ok [ns_server:info,2014-08-19T16:49:32.576,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1008">>: ok [ns_server:info,2014-08-19T16:49:32.576,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1007">>: ok [ns_server:info,2014-08-19T16:49:32.577,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1006">>: ok 
[ns_server:info,2014-08-19T16:49:32.577,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1005">>: ok [ns_server:info,2014-08-19T16:49:32.578,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1004">>: ok [ns_server:info,2014-08-19T16:49:32.579,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1003">>: ok [ns_server:info,2014-08-19T16:49:32.579,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1002">>: ok [ns_server:info,2014-08-19T16:49:32.580,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1001">>: ok [ns_server:info,2014-08-19T16:49:32.581,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1000">>: ok [ns_server:info,2014-08-19T16:49:32.582,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/100">>: ok [ns_server:info,2014-08-19T16:49:32.582,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:delete_databases_and_files:436]Couch dbs are deleted. Proceeding with bucket directory [ns_server:debug,2014-08-19T16:49:32.582,ns_1@10.242.238.90:<0.17720.0>:ns_storage_conf:do_delete_bucket_indexes:457]indexes directory doesn't exist already. fine. [ns_server:info,2014-08-19T16:49:32.729,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.91' [ns_server:debug,2014-08-19T16:49:32.885,ns_1@10.242.238.90:ns_bucket_worker<0.17558.0>:ns_bucket_sup:update_childs:84]Starting new child: {{per_bucket_sup,"default"}, {single_bucket_sup,start_link,["default"]}, permanent,infinity,supervisor, [single_bucket_sup]} [ns_server:debug,2014-08-19T16:49:32.885,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [error_logger:info,2014-08-19T16:49:32.887,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.18759.0>}, {name,{per_bucket_sup,"default"}}, {mfargs,{single_bucket_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:49:32.955,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:init:228]Usable vbuckets: [] [ns_server:debug,2014-08-19T16:49:32.955,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:32.955,ns_1@10.242.238.90:ns_memcached-default<0.18773.0>:ns_memcached:init:144]Starting ns_memcached [error_logger:info,2014-08-19T16:49:32.955,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.18761.0>}, 
{name,{capi_set_view_manager,"default"}}, {mfargs,{capi_set_view_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:32.955,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:32.956,ns_1@10.242.238.90:<0.18774.0>:ns_memcached:run_connect_phase:167]Started 'connecting' phase of ns_memcached-default. Parent is <0.18773.0> [error_logger:info,2014-08-19T16:49:32.956,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.18773.0>}, {name,{ns_memcached,"default"}}, {mfargs,{ns_memcached,start_link,["default"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:32.958,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.18775.0>}, {name,{tap_replication_manager,"default"}}, {mfargs, {tap_replication_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:32.959,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.18776.0>}, {name,{ns_vbm_new_sup,"default"}}, {mfargs,{ns_vbm_new_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:49:32.961,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.18777.0>}, {name,{ns_vbm_sup,"default"}}, {mfargs,{ns_vbm_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:49:32.964,ns_1@10.242.238.90:<0.17535.0>:mc_tcp_listener:accept_loop:31]Got new connection [ns_server:info,2014-08-19T16:49:32.964,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:read_flush_counter:936]Loading flushseq failed: {error,enoent}. Assuming it's equal to global config. 
[ns_server:info,2014-08-19T16:49:32.965,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:read_flush_counter_from_config:943]Initialized flushseq 0 from bucket config [error_logger:info,2014-08-19T16:49:32.965,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.18778.0>}, {name,{janitor_agent,"default"}}, {mfargs,{janitor_agent,start_link,["default"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:32.966,ns_1@10.242.238.90:<0.17535.0>:mc_tcp_listener:accept_loop:33]Passed connection to mc_conn_sup: <0.18779.0> [error_logger:info,2014-08-19T16:49:32.968,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.18780.0>}, {name,{couch_stats_reader,"default"}}, {mfargs,{couch_stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:32.968,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.18781.0>}, {name,{stats_collector,"default"}}, {mfargs,{stats_collector,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:32.968,ns_1@10.242.238.90:ns_memcached-default<0.18773.0>:ns_memcached:ensure_bucket:1178]Created bucket "default" with config string "ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;max_size=13369344000;tap_keepalive=300;dbname=/var/lib/pgsql/default;allow_data_loss_during_shutdown=true;backend=couchdb;couch_bucket=default;couch_port=11213;max_vbuckets=1024;alog_path=/var/lib/pgsql/default/access.log;data_traffic_enabled=false;max_num_workers=3;uuid=d95ae85dc319bab78fd23c50f6adae2e;vb0=false;waitforwarmup=false;failpartialwarmup=false;" [ns_server:info,2014-08-19T16:49:32.969,ns_1@10.242.238.90:ns_memcached-default<0.18773.0>:ns_memcached:handle_cast:609]Main ns_memcached connection established: {ok,#Port<0.13274>} [ns_server:debug,2014-08-19T16:49:32.969,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [error_logger:info,2014-08-19T16:49:32.970,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.18783.0>}, {name,{stats_archiver,"default"}}, {mfargs,{stats_archiver,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:32.970,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.18791.0>}, {name,{stats_reader,"default"}}, {mfargs,{stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:49:32.970,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= 
supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.18793.0>}, {name,{failover_safeness_level,"default"}}, {mfargs, {failover_safeness_level,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [user:info,2014-08-19T16:49:32.971,ns_1@10.242.238.90:ns_memcached-default<0.18773.0>:ns_memcached:handle_cast:632]Bucket "default" loaded on node 'ns_1@10.242.238.90' in 0 seconds. [error_logger:info,2014-08-19T16:49:32.971,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.18794.0>}, {name,{terse_bucket_info_uploader,"default"}}, {mfargs, {terse_bucket_info_uploader,start_link, ["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:32.983,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:33.159,ns_1@10.242.238.90:ns_heart_slow_status_updater<0.17440.0>:ns_heart:current_status_slow:261]Ignoring failure to get stats for bucket: "default": {error,no_samples} [ns_server:debug,2014-08-19T16:49:33.336,ns_1@10.242.238.90:ns_heart_slow_status_updater<0.17440.0>:ns_heart:current_status_slow:261]Ignoring failure to get stats for bucket: "default": {error,no_samples} [ns_server:debug,2014-08-19T16:49:33.475,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2014-08-19T16:49:33.476,ns_1@10.242.238.90:<0.18821.0>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning up indexes for bucket `default` [ns_server:info,2014-08-19T16:49:33.480,ns_1@10.242.238.90:<0.18821.0>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:49:33.481,ns_1@10.242.238.90:<0.18824.0>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 0, disk size is 34 [ns_server:debug,2014-08-19T16:49:33.481,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:49:33.481,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:info,2014-08-19T16:49:33.936,ns_1@10.242.238.90:ns_memcached-default<0.18773.0>:ns_memcached:handle_call:247]Enabling traffic to bucket "default" [ns_server:info,2014-08-19T16:49:33.936,ns_1@10.242.238.90:ns_memcached-default<0.18773.0>:ns_memcached:handle_call:251]Bucket "default" marked as warmed in 0 seconds [ns_server:debug,2014-08-19T16:49:34.000,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:34.000,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:34.004,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [{[['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], 
['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88'|...], [...]|...], [{replication_topology,star},{tags,undefined},{max_slaves,10}]}, {[['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88'|...], [...]|...], [{replication_topology,star},{tags,undefined},{max_slaves,10}]}] [ns_server:debug,2014-08-19T16:49:34.010,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:34.011,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:34.016,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[{0,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {1,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {2,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {3,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {4,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {5,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {6,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {7,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {8,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {9,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {10,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {11,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {12,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {13,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {14,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {15,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {16,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {17,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {18,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {19,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {20,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {21,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {22,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {23,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {24,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {25,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {26,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {27,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {28,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {29,[], 
['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {30,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {31,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {32,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {33,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {34,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {35,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {36,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {37,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {38,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {39,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {40,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {41,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {42,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {43,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {44,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {45,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {46,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {47,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {48,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {49,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {50,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {51,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {52,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {53,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {54,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {55,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {56,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {57,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {58,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {59,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {60,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {61,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {62,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {63,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {64,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {65,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {66,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {67,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {68,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {69,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {70,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {71,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {72,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {73,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {74,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {75,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {76,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {77,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {78,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {79,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {80,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {81,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {82,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {83,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {84,[],['ns_1@10.242.238.88'|...]}, {85,[],[...]}, {86,[],...}, {87,...}, {...}|...]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:34.063,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1023 state to replica 
[ns_server:info,2014-08-19T16:49:34.091,ns_1@10.242.238.90:<0.18828.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1023 to state replica [ns_server:debug,2014-08-19T16:49:34.138,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1023. Nacking mccouch update. [views:debug,2014-08-19T16:49:34.138,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1023. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:34.139,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1023] [ns_server:debug,2014-08-19T16:49:34.139,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1023,replica,0} [views:debug,2014-08-19T16:49:34.181,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1023. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:34.181,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1023,replica,0} [ns_server:debug,2014-08-19T16:49:34.183,ns_1@10.242.238.90:<0.18828.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1023_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:34.185,ns_1@10.242.238.90:<0.18828.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1023]}, {checkpoints,[{1023,0}]}, {name,<<"replication_building_1023_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1023]}, {takeover,false}, {suffix,"building_1023_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1023,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:34.185,ns_1@10.242.238.90:<0.18828.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.18843.0> [rebalance:debug,2014-08-19T16:49:34.185,ns_1@10.242.238.90:<0.18828.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:34.186,ns_1@10.242.238.90:<0.18828.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.25772.0>,#Ref<16550.0.1.16173>}]} [rebalance:info,2014-08-19T16:49:34.186,ns_1@10.242.238.90:<0.18828.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1023 [rebalance:debug,2014-08-19T16:49:34.186,ns_1@10.242.238.90:<0.18828.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.25772.0>,#Ref<16550.0.1.16173>}] [ns_server:debug,2014-08-19T16:49:34.187,ns_1@10.242.238.90:<0.18828.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:34.208,ns_1@10.242.238.90:<0.18844.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1023 [ns_server:info,2014-08-19T16:49:34.214,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 767 state to replica [ns_server:info,2014-08-19T16:49:34.220,ns_1@10.242.238.90:<0.18847.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 767 to state replica [ns_server:debug,2014-08-19T16:49:34.307,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added 
_local/vbuuid document into vb: 767. Nacking mccouch update. [views:debug,2014-08-19T16:49:34.307,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/767. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:34.307,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",767,pending,0} [ns_server:debug,2014-08-19T16:49:34.308,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [767,1023] [ns_server:debug,2014-08-19T16:49:34.325,ns_1@10.242.238.90:<0.18847.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_767_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:34.327,ns_1@10.242.238.90:<0.18847.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[767]}, {checkpoints,[{767,0}]}, {name,<<"replication_building_767_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[767]}, {takeover,false}, {suffix,"building_767_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",767,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:34.327,ns_1@10.242.238.90:<0.18847.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.18862.0> [rebalance:debug,2014-08-19T16:49:34.328,ns_1@10.242.238.90:<0.18847.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:34.328,ns_1@10.242.238.90:<0.18847.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.25806.0>,#Ref<16550.0.1.18163>}]} [rebalance:info,2014-08-19T16:49:34.328,ns_1@10.242.238.90:<0.18847.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 767 [rebalance:debug,2014-08-19T16:49:34.329,ns_1@10.242.238.90:<0.18847.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.25806.0>,#Ref<16550.0.1.18163>}] [ns_server:debug,2014-08-19T16:49:34.329,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18863.0> (ok) [ns_server:debug,2014-08-19T16:49:34.329,ns_1@10.242.238.90:<0.18847.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:34.331,ns_1@10.242.238.90:<0.18864.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 767 [views:debug,2014-08-19T16:49:34.341,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/767. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:34.341,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",767,pending,0} [rebalance:debug,2014-08-19T16:49:34.342,ns_1@10.242.238.90:<0.18864.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:34.342,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18864.0> (ok) [rebalance:debug,2014-08-19T16:49:34.380,ns_1@10.242.238.90:<0.18844.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:34.380,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18844.0> (ok) [ns_server:info,2014-08-19T16:49:34.465,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1022 state to replica [ns_server:info,2014-08-19T16:49:34.471,ns_1@10.242.238.90:<0.18867.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1022 to state replica [ns_server:debug,2014-08-19T16:49:34.561,ns_1@10.242.238.90:<0.18867.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1022_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:34.562,ns_1@10.242.238.90:<0.18867.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1022]}, {checkpoints,[{1022,0}]}, {name,<<"replication_building_1022_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1022]}, {takeover,false}, {suffix,"building_1022_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1022,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:34.563,ns_1@10.242.238.90:<0.18867.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.18882.0> [rebalance:debug,2014-08-19T16:49:34.563,ns_1@10.242.238.90:<0.18867.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:34.563,ns_1@10.242.238.90:<0.18867.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.25863.0>,#Ref<16550.0.1.18481>}]} [rebalance:info,2014-08-19T16:49:34.563,ns_1@10.242.238.90:<0.18867.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1022 [rebalance:debug,2014-08-19T16:49:34.564,ns_1@10.242.238.90:<0.18867.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.25863.0>,#Ref<16550.0.1.18481>}] [ns_server:debug,2014-08-19T16:49:34.565,ns_1@10.242.238.90:<0.18867.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:34.585,ns_1@10.242.238.90:<0.18883.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1022 [ns_server:info,2014-08-19T16:49:34.591,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 766 state to replica [ns_server:info,2014-08-19T16:49:34.598,ns_1@10.242.238.90:<0.18886.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 766 to state replica [ns_server:debug,2014-08-19T16:49:34.606,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1022. Nacking mccouch update. 
[views:debug,2014-08-19T16:49:34.606,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1022. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:34.606,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1022,replica,0} [ns_server:debug,2014-08-19T16:49:34.606,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1022,767,1023] [views:debug,2014-08-19T16:49:34.673,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1022. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:34.673,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1022,replica,0} [ns_server:debug,2014-08-19T16:49:34.704,ns_1@10.242.238.90:<0.18886.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_766_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:34.705,ns_1@10.242.238.90:<0.18886.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[766]}, {checkpoints,[{766,0}]}, {name,<<"replication_building_766_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[766]}, {takeover,false}, {suffix,"building_766_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",766,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:34.706,ns_1@10.242.238.90:<0.18886.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.18887.0> [rebalance:debug,2014-08-19T16:49:34.706,ns_1@10.242.238.90:<0.18886.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:34.706,ns_1@10.242.238.90:<0.18886.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.25883.0>,#Ref<16550.0.1.18621>}]} [rebalance:info,2014-08-19T16:49:34.706,ns_1@10.242.238.90:<0.18886.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 766 [rebalance:debug,2014-08-19T16:49:34.707,ns_1@10.242.238.90:<0.18886.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.25883.0>,#Ref<16550.0.1.18621>}] [ns_server:debug,2014-08-19T16:49:34.707,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18888.0> (ok) [ns_server:debug,2014-08-19T16:49:34.707,ns_1@10.242.238.90:<0.18886.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:34.709,ns_1@10.242.238.90:<0.18889.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 766 [ns_server:info,2014-08-19T16:49:34.769,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.89': ["default"] [ns_server:debug,2014-08-19T16:49:34.806,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 766. Nacking mccouch update. [views:debug,2014-08-19T16:49:34.807,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/766. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:34.807,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",766,pending,0} [ns_server:debug,2014-08-19T16:49:34.807,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1022,767,1023] [ns_server:info,2014-08-19T16:49:34.838,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1021 state to replica [ns_server:info,2014-08-19T16:49:34.842,ns_1@10.242.238.90:<0.18906.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1021 to state replica [views:debug,2014-08-19T16:49:34.882,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/766. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:34.882,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",766,pending,0} [rebalance:debug,2014-08-19T16:49:34.884,ns_1@10.242.238.90:<0.18889.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:34.884,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18889.0> (ok) [ns_server:debug,2014-08-19T16:49:34.932,ns_1@10.242.238.90:<0.18906.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1021_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:34.933,ns_1@10.242.238.90:<0.18906.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1021]}, {checkpoints,[{1021,0}]}, {name,<<"replication_building_1021_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1021]}, {takeover,false}, {suffix,"building_1021_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1021,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:34.934,ns_1@10.242.238.90:<0.18906.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.18907.0> [rebalance:debug,2014-08-19T16:49:34.934,ns_1@10.242.238.90:<0.18906.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:34.934,ns_1@10.242.238.90:<0.18906.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.25958.0>,#Ref<16550.0.1.19028>}]} [rebalance:info,2014-08-19T16:49:34.934,ns_1@10.242.238.90:<0.18906.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1021 [rebalance:debug,2014-08-19T16:49:34.935,ns_1@10.242.238.90:<0.18906.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.25958.0>,#Ref<16550.0.1.19028>}] [ns_server:debug,2014-08-19T16:49:34.936,ns_1@10.242.238.90:<0.18906.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:34.956,ns_1@10.242.238.90:<0.18908.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1021 [ns_server:info,2014-08-19T16:49:34.962,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 765 state to replica [ns_server:info,2014-08-19T16:49:34.969,ns_1@10.242.238.90:<0.18911.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 765 to state replica 
[ns_server:debug,2014-08-19T16:49:35.073,ns_1@10.242.238.90:<0.18911.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_765_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:35.074,ns_1@10.242.238.90:<0.18911.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[765]}, {checkpoints,[{765,0}]}, {name,<<"replication_building_765_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[765]}, {takeover,false}, {suffix,"building_765_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",765,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:35.075,ns_1@10.242.238.90:<0.18911.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.18926.0> [rebalance:debug,2014-08-19T16:49:35.075,ns_1@10.242.238.90:<0.18911.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:35.075,ns_1@10.242.238.90:<0.18911.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.25992.0>,#Ref<16550.0.1.19197>}]} [rebalance:info,2014-08-19T16:49:35.075,ns_1@10.242.238.90:<0.18911.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 765 [rebalance:debug,2014-08-19T16:49:35.076,ns_1@10.242.238.90:<0.18911.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.25992.0>,#Ref<16550.0.1.19197>}] [ns_server:debug,2014-08-19T16:49:35.076,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18927.0> (ok) [ns_server:debug,2014-08-19T16:49:35.077,ns_1@10.242.238.90:<0.18911.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:35.078,ns_1@10.242.238.90:<0.18928.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 765 [ns_server:debug,2014-08-19T16:49:35.099,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1021. Nacking mccouch update. [views:debug,2014-08-19T16:49:35.099,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1021. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:35.099,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1021,replica,0} [ns_server:debug,2014-08-19T16:49:35.099,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1022,1021,767,1023] [views:debug,2014-08-19T16:49:35.175,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1021. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:35.175,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1021,replica,0} [rebalance:debug,2014-08-19T16:49:35.176,ns_1@10.242.238.90:<0.18883.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:35.176,ns_1@10.242.238.90:<0.18928.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:35.176,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18883.0> (ok) [ns_server:debug,2014-08-19T16:49:35.176,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18928.0> (ok) [ns_server:info,2014-08-19T16:49:35.210,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1020 state to replica [ns_server:info,2014-08-19T16:49:35.214,ns_1@10.242.238.90:<0.18931.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1020 to state replica [ns_server:debug,2014-08-19T16:49:35.303,ns_1@10.242.238.90:<0.18931.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1020_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:35.304,ns_1@10.242.238.90:<0.18931.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1020]}, {checkpoints,[{1020,0}]}, {name,<<"replication_building_1020_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1020]}, {takeover,false}, {suffix,"building_1020_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1020,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:35.305,ns_1@10.242.238.90:<0.18931.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.18946.0> [rebalance:debug,2014-08-19T16:49:35.305,ns_1@10.242.238.90:<0.18931.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:35.305,ns_1@10.242.238.90:<0.18931.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26054.0>,#Ref<16550.0.1.19522>}]} [rebalance:info,2014-08-19T16:49:35.306,ns_1@10.242.238.90:<0.18931.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1020 [rebalance:debug,2014-08-19T16:49:35.306,ns_1@10.242.238.90:<0.18931.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26054.0>,#Ref<16550.0.1.19522>}] [ns_server:debug,2014-08-19T16:49:35.307,ns_1@10.242.238.90:<0.18931.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:35.330,ns_1@10.242.238.90:<0.18947.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 766 [rebalance:debug,2014-08-19T16:49:35.330,ns_1@10.242.238.90:<0.18948.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1020 [rebalance:debug,2014-08-19T16:49:35.331,ns_1@10.242.238.90:<0.18953.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 767 [ns_server:debug,2014-08-19T16:49:35.333,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 765. Nacking mccouch update. 
[views:debug,2014-08-19T16:49:35.333,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/765. Updated state: pending (1) [ns_server:debug,2014-08-19T16:49:35.334,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",765,pending,1} [ns_server:debug,2014-08-19T16:49:35.334,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1022,765,1021,767,1023] [ns_server:info,2014-08-19T16:49:35.337,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 764 state to replica [ns_server:info,2014-08-19T16:49:35.345,ns_1@10.242.238.90:<0.18956.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 764 to state replica [views:debug,2014-08-19T16:49:35.409,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/765. Updated state: pending (1) [ns_server:debug,2014-08-19T16:49:35.409,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",765,pending,1} [ns_server:debug,2014-08-19T16:49:35.450,ns_1@10.242.238.90:<0.18956.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_764_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:35.451,ns_1@10.242.238.90:<0.18956.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[764]}, {checkpoints,[{764,0}]}, {name,<<"replication_building_764_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[764]}, {takeover,false}, {suffix,"building_764_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",764,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:35.452,ns_1@10.242.238.90:<0.18956.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.18963.0> [rebalance:debug,2014-08-19T16:49:35.452,ns_1@10.242.238.90:<0.18956.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:35.452,ns_1@10.242.238.90:<0.18956.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26096.0>,#Ref<16550.0.1.19763>}]} [rebalance:info,2014-08-19T16:49:35.453,ns_1@10.242.238.90:<0.18956.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 764 [rebalance:debug,2014-08-19T16:49:35.453,ns_1@10.242.238.90:<0.18956.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26096.0>,#Ref<16550.0.1.19763>}] [ns_server:debug,2014-08-19T16:49:35.453,ns_1@10.242.238.90:<0.18956.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:49:35.453,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18964.0> (ok) [rebalance:debug,2014-08-19T16:49:35.455,ns_1@10.242.238.90:<0.18965.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 764 [ns_server:debug,2014-08-19T16:49:35.550,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1020. Nacking mccouch update. 
[views:debug,2014-08-19T16:49:35.550,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1020. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:35.550,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1020,replica,0} [ns_server:debug,2014-08-19T16:49:35.550,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1022,765,1021,767,1020,1023] [views:debug,2014-08-19T16:49:35.584,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1020. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:35.584,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1020,replica,0} [ns_server:info,2014-08-19T16:49:35.584,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1019 state to replica [ns_server:info,2014-08-19T16:49:35.588,ns_1@10.242.238.90:<0.18982.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1019 to state replica [ns_server:debug,2014-08-19T16:49:35.650,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 764. Nacking mccouch update. [views:debug,2014-08-19T16:49:35.651,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/764. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:35.651,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",764,pending,0} [ns_server:debug,2014-08-19T16:49:35.651,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1022,765,1021,764,767,1020,1023] [ns_server:debug,2014-08-19T16:49:35.678,ns_1@10.242.238.90:<0.18982.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1019_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:35.679,ns_1@10.242.238.90:<0.18982.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1019]}, {checkpoints,[{1019,0}]}, {name,<<"replication_building_1019_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1019]}, {takeover,false}, {suffix,"building_1019_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1019,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:35.680,ns_1@10.242.238.90:<0.18982.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.18997.0> [rebalance:debug,2014-08-19T16:49:35.680,ns_1@10.242.238.90:<0.18982.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:35.680,ns_1@10.242.238.90:<0.18982.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26154.0>,#Ref<16550.0.1.20108>}]} [rebalance:info,2014-08-19T16:49:35.680,ns_1@10.242.238.90:<0.18982.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1019 [rebalance:debug,2014-08-19T16:49:35.681,ns_1@10.242.238.90:<0.18982.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26154.0>,#Ref<16550.0.1.20108>}] 
[ns_server:debug,2014-08-19T16:49:35.681,ns_1@10.242.238.90:<0.18982.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:49:35.684,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/764. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:35.685,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",764,pending,0} [rebalance:debug,2014-08-19T16:49:35.685,ns_1@10.242.238.90:<0.18908.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:35.685,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18908.0> (ok) [rebalance:debug,2014-08-19T16:49:35.701,ns_1@10.242.238.90:<0.18998.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1019 [ns_server:info,2014-08-19T16:49:35.707,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 763 state to replica [ns_server:info,2014-08-19T16:49:35.713,ns_1@10.242.238.90:<0.19001.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 763 to state replica [ns_server:debug,2014-08-19T16:49:35.785,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1019. Nacking mccouch update. [views:debug,2014-08-19T16:49:35.785,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1019. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:35.785,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1019,replica,0} [ns_server:debug,2014-08-19T16:49:35.785,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1019,1022,765,1021,764,767,1020,1023] [views:debug,2014-08-19T16:49:35.819,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1019. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:35.819,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1019,replica,0} [ns_server:debug,2014-08-19T16:49:35.819,ns_1@10.242.238.90:<0.19001.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_763_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:35.821,ns_1@10.242.238.90:<0.19001.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[763]}, {checkpoints,[{763,0}]}, {name,<<"replication_building_763_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[763]}, {takeover,false}, {suffix,"building_763_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",763,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:35.821,ns_1@10.242.238.90:<0.19001.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19016.0> [rebalance:debug,2014-08-19T16:49:35.822,ns_1@10.242.238.90:<0.19001.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:35.822,ns_1@10.242.238.90:<0.19001.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26188.0>,#Ref<16550.0.1.20278>}]} [rebalance:info,2014-08-19T16:49:35.822,ns_1@10.242.238.90:<0.19001.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 763 [rebalance:debug,2014-08-19T16:49:35.822,ns_1@10.242.238.90:<0.19001.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26188.0>,#Ref<16550.0.1.20278>}] [ns_server:debug,2014-08-19T16:49:35.823,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19017.0> (ok) [ns_server:debug,2014-08-19T16:49:35.823,ns_1@10.242.238.90:<0.19001.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:35.824,ns_1@10.242.238.90:<0.19018.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 763 [ns_server:debug,2014-08-19T16:49:35.886,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 763. Nacking mccouch update. [views:debug,2014-08-19T16:49:35.886,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/763. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:35.886,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",763,pending,0} [ns_server:debug,2014-08-19T16:49:35.886,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1019,1022,765,1021,764,767,1020,1023,763] [views:debug,2014-08-19T16:49:35.920,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/763. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:35.920,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",763,pending,0} [rebalance:debug,2014-08-19T16:49:35.920,ns_1@10.242.238.90:<0.18947.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:35.920,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18947.0> (ok) [rebalance:debug,2014-08-19T16:49:35.921,ns_1@10.242.238.90:<0.18965.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:35.921,ns_1@10.242.238.90:<0.18953.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:35.921,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18965.0> (ok) [ns_server:debug,2014-08-19T16:49:35.921,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18953.0> (ok) [ns_server:info,2014-08-19T16:49:35.955,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1018 state to replica [ns_server:info,2014-08-19T16:49:35.959,ns_1@10.242.238.90:<0.19035.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1018 to state replica [rebalance:debug,2014-08-19T16:49:35.968,ns_1@10.242.238.90:<0.18948.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:35.968,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18948.0> (ok) [rebalance:debug,2014-08-19T16:49:35.968,ns_1@10.242.238.90:<0.19018.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:35.968,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19018.0> (ok) [ns_server:debug,2014-08-19T16:49:36.048,ns_1@10.242.238.90:<0.19035.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1018_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:36.050,ns_1@10.242.238.90:<0.19035.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1018]}, {checkpoints,[{1018,0}]}, {name,<<"replication_building_1018_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1018]}, {takeover,false}, {suffix,"building_1018_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1018,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:36.051,ns_1@10.242.238.90:<0.19035.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19036.0> [rebalance:debug,2014-08-19T16:49:36.051,ns_1@10.242.238.90:<0.19035.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:36.051,ns_1@10.242.238.90:<0.19035.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26245.0>,#Ref<16550.0.1.20565>}]} [rebalance:info,2014-08-19T16:49:36.051,ns_1@10.242.238.90:<0.19035.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1018 [rebalance:debug,2014-08-19T16:49:36.052,ns_1@10.242.238.90:<0.19035.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26245.0>,#Ref<16550.0.1.20565>}] 
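Editor's note: each vbucket in this excerpt moves through the same run of messages — the memcached state change, the ebucketmigrator initial stream, the backfill close, and the janitor_agent wait for checkpoint persistence. The sketch below is an illustrative aid only (the phase labels are our own; the regular expressions are copied from the literal message texts above) for grouping those messages per vbucket when reading a log like this.

import re
from collections import defaultdict

# Phase labels are our own; the patterns come from the repeated message texts above.
PHASES = [
    ("state_to_replica", r"Changed vbucket (\d+) state to replica"),
    ("tap_stream_start", r"Initial stream for vbucket (\d+)"),
    ("await_persistence", r"wait for persistence of checkpoint \d+ in vbucket (\d+)"),
]

def track(messages):
    """Map vbucket id -> ordered list of phases reached, judged by message text."""
    progress = defaultdict(list)
    for msg in messages:
        for phase, pattern in PHASES:
            m = re.search(pattern, msg)
            if m:
                progress[int(m.group(1))].append(phase)
    return dict(progress)

sample = [
    "Changed vbucket 1018 state to replica",
    "Initial stream for vbucket 1018",
    "Going to wait for persistence of checkpoint 1 in vbucket 1018",
]
print(track(sample))
# -> {1018: ['state_to_replica', 'tap_stream_start', 'await_persistence']}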
[ns_server:debug,2014-08-19T16:49:36.052,ns_1@10.242.238.90:<0.19035.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:36.071,ns_1@10.242.238.90:<0.19037.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1018 [ns_server:info,2014-08-19T16:49:36.077,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 762 state to replica [ns_server:info,2014-08-19T16:49:36.083,ns_1@10.242.238.90:<0.19040.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 762 to state replica [ns_server:debug,2014-08-19T16:49:36.190,ns_1@10.242.238.90:<0.19040.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_762_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:36.191,ns_1@10.242.238.90:<0.19040.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[762]}, {checkpoints,[{762,0}]}, {name,<<"replication_building_762_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[762]}, {takeover,false}, {suffix,"building_762_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",762,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:36.192,ns_1@10.242.238.90:<0.19040.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19055.0> [rebalance:debug,2014-08-19T16:49:36.192,ns_1@10.242.238.90:<0.19040.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:36.192,ns_1@10.242.238.90:<0.19040.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26265.0>,#Ref<16550.0.1.20719>}]} [rebalance:info,2014-08-19T16:49:36.193,ns_1@10.242.238.90:<0.19040.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 762 [rebalance:debug,2014-08-19T16:49:36.193,ns_1@10.242.238.90:<0.19040.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26265.0>,#Ref<16550.0.1.20719>}] [ns_server:debug,2014-08-19T16:49:36.193,ns_1@10.242.238.90:<0.19040.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:49:36.194,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19056.0> (ok) [rebalance:debug,2014-08-19T16:49:36.195,ns_1@10.242.238.90:<0.19057.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 762 [ns_server:debug,2014-08-19T16:49:36.209,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1018. Nacking mccouch update. [views:debug,2014-08-19T16:49:36.210,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1018. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:36.210,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1019,1022,765,1018,1021,764,767,1020,1023,763] [ns_server:debug,2014-08-19T16:49:36.210,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1018,replica,0} [views:debug,2014-08-19T16:49:36.293,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1018. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:36.293,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1018,replica,0} [ns_server:info,2014-08-19T16:49:36.326,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1017 state to replica [ns_server:info,2014-08-19T16:49:36.329,ns_1@10.242.238.90:<0.19060.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1017 to state replica [rebalance:debug,2014-08-19T16:49:36.361,ns_1@10.242.238.90:<0.18998.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:36.361,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.18998.0> (ok) [ns_server:debug,2014-08-19T16:49:36.418,ns_1@10.242.238.90:<0.19060.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1017_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:36.419,ns_1@10.242.238.90:<0.19060.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1017]}, {checkpoints,[{1017,0}]}, {name,<<"replication_building_1017_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1017]}, {takeover,false}, {suffix,"building_1017_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1017,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:36.420,ns_1@10.242.238.90:<0.19060.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19061.0> [rebalance:debug,2014-08-19T16:49:36.420,ns_1@10.242.238.90:<0.19060.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:36.421,ns_1@10.242.238.90:<0.19060.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26330.0>,#Ref<16550.0.1.21059>}]} [rebalance:info,2014-08-19T16:49:36.421,ns_1@10.242.238.90:<0.19060.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1017 [rebalance:debug,2014-08-19T16:49:36.421,ns_1@10.242.238.90:<0.19060.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26330.0>,#Ref<16550.0.1.21059>}] [ns_server:debug,2014-08-19T16:49:36.422,ns_1@10.242.238.90:<0.19060.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:36.443,ns_1@10.242.238.90:<0.19063.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1017 [ns_server:info,2014-08-19T16:49:36.450,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 761 state to replica [ns_server:info,2014-08-19T16:49:36.455,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.91': 
["default"] [ns_server:info,2014-08-19T16:49:36.458,ns_1@10.242.238.90:<0.19066.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 761 to state replica [ns_server:debug,2014-08-19T16:49:36.561,ns_1@10.242.238.90:<0.19066.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_761_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:36.562,ns_1@10.242.238.90:<0.19066.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[761]}, {checkpoints,[{761,0}]}, {name,<<"replication_building_761_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[761]}, {takeover,false}, {suffix,"building_761_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",761,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:36.563,ns_1@10.242.238.90:<0.19066.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19081.0> [rebalance:debug,2014-08-19T16:49:36.563,ns_1@10.242.238.90:<0.19066.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:36.564,ns_1@10.242.238.90:<0.19066.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26366.0>,#Ref<16550.0.1.21227>}]} [rebalance:info,2014-08-19T16:49:36.564,ns_1@10.242.238.90:<0.19066.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 761 [rebalance:debug,2014-08-19T16:49:36.564,ns_1@10.242.238.90:<0.19066.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26366.0>,#Ref<16550.0.1.21227>}] [ns_server:debug,2014-08-19T16:49:36.565,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19082.0> (ok) [ns_server:debug,2014-08-19T16:49:36.565,ns_1@10.242.238.90:<0.19066.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:36.566,ns_1@10.242.238.90:<0.19083.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 761 [ns_server:debug,2014-08-19T16:49:36.594,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 762. Nacking mccouch update. [views:debug,2014-08-19T16:49:36.594,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/762. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:36.594,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1019,1022,762,765,1018,1021,764,767,1020,1023,763] [ns_server:debug,2014-08-19T16:49:36.594,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",762,pending,0} [views:debug,2014-08-19T16:49:36.661,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/762. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:36.662,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",762,pending,0} [ns_server:info,2014-08-19T16:49:36.696,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1016 state to replica [ns_server:info,2014-08-19T16:49:36.700,ns_1@10.242.238.90:<0.19086.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1016 to state replica [ns_server:debug,2014-08-19T16:49:36.790,ns_1@10.242.238.90:<0.19086.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1016_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:36.791,ns_1@10.242.238.90:<0.19086.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1016]}, {checkpoints,[{1016,0}]}, {name,<<"replication_building_1016_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1016]}, {takeover,false}, {suffix,"building_1016_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1016,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:36.792,ns_1@10.242.238.90:<0.19086.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19087.0> [rebalance:debug,2014-08-19T16:49:36.792,ns_1@10.242.238.90:<0.19086.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:36.792,ns_1@10.242.238.90:<0.19086.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26438.0>,#Ref<16550.0.1.21618>}]} [rebalance:info,2014-08-19T16:49:36.793,ns_1@10.242.238.90:<0.19086.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1016 [rebalance:debug,2014-08-19T16:49:36.793,ns_1@10.242.238.90:<0.19086.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26438.0>,#Ref<16550.0.1.21618>}] [ns_server:debug,2014-08-19T16:49:36.794,ns_1@10.242.238.90:<0.19086.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:36.814,ns_1@10.242.238.90:<0.19102.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1016 [ns_server:info,2014-08-19T16:49:36.821,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 760 state to replica [ns_server:info,2014-08-19T16:49:36.829,ns_1@10.242.238.90:<0.19105.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 760 to state replica [ns_server:debug,2014-08-19T16:49:36.887,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1017. Nacking mccouch update. [views:debug,2014-08-19T16:49:36.887,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1017. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:36.887,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1019,1022,762,765,1018,1021,764,767,1017,1020,1023,763] [ns_server:debug,2014-08-19T16:49:36.887,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1017,replica,0} [ns_server:debug,2014-08-19T16:49:36.933,ns_1@10.242.238.90:<0.19105.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_760_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:36.935,ns_1@10.242.238.90:<0.19105.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[760]}, {checkpoints,[{760,0}]}, {name,<<"replication_building_760_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[760]}, {takeover,false}, {suffix,"building_760_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",760,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:36.936,ns_1@10.242.238.90:<0.19105.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19106.0> [rebalance:debug,2014-08-19T16:49:36.936,ns_1@10.242.238.90:<0.19105.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:36.936,ns_1@10.242.238.90:<0.19105.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26458.0>,#Ref<16550.0.1.21737>}]} [rebalance:info,2014-08-19T16:49:36.936,ns_1@10.242.238.90:<0.19105.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 760 [rebalance:debug,2014-08-19T16:49:36.937,ns_1@10.242.238.90:<0.19105.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26458.0>,#Ref<16550.0.1.21737>}] [ns_server:debug,2014-08-19T16:49:36.937,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19107.0> (ok) [ns_server:debug,2014-08-19T16:49:36.937,ns_1@10.242.238.90:<0.19105.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:36.939,ns_1@10.242.238.90:<0.19108.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 760 [views:debug,2014-08-19T16:49:36.954,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1017. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:36.954,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1017,replica,0} [ns_server:info,2014-08-19T16:49:37.068,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1015 state to replica [ns_server:info,2014-08-19T16:49:37.073,ns_1@10.242.238.90:<0.19125.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1015 to state replica [ns_server:debug,2014-08-19T16:49:37.095,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 761. Nacking mccouch update. [views:debug,2014-08-19T16:49:37.095,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/761. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:37.095,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",761,pending,0} [ns_server:debug,2014-08-19T16:49:37.095,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1019,1022,762,765,1018,1021,761,764,767,1017,1020,1023,763] [views:debug,2014-08-19T16:49:37.129,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/761. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:37.129,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",761,pending,0} [ns_server:debug,2014-08-19T16:49:37.163,ns_1@10.242.238.90:<0.19125.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1015_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:37.164,ns_1@10.242.238.90:<0.19125.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1015]}, {checkpoints,[{1015,0}]}, {name,<<"replication_building_1015_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1015]}, {takeover,false}, {suffix,"building_1015_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1015,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:37.165,ns_1@10.242.238.90:<0.19125.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19126.0> [rebalance:debug,2014-08-19T16:49:37.165,ns_1@10.242.238.90:<0.19125.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:37.165,ns_1@10.242.238.90:<0.19125.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26515.0>,#Ref<16550.0.1.22025>}]} [rebalance:info,2014-08-19T16:49:37.165,ns_1@10.242.238.90:<0.19125.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1015 [rebalance:debug,2014-08-19T16:49:37.166,ns_1@10.242.238.90:<0.19125.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26515.0>,#Ref<16550.0.1.22025>}] [ns_server:debug,2014-08-19T16:49:37.166,ns_1@10.242.238.90:<0.19125.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:37.187,ns_1@10.242.238.90:<0.19141.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1015 [ns_server:info,2014-08-19T16:49:37.194,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 759 state to replica [ns_server:info,2014-08-19T16:49:37.200,ns_1@10.242.238.90:<0.19144.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 759 to state replica [ns_server:debug,2014-08-19T16:49:37.204,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1016. Nacking mccouch update. [views:debug,2014-08-19T16:49:37.204,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1016. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:37.204,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1016,replica,0} [ns_server:debug,2014-08-19T16:49:37.204,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1016,1019,1022,762,765,1018,1021,761,764,767,1017,1020,1023,763] [views:debug,2014-08-19T16:49:37.239,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1016. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:37.239,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1016,replica,0} [rebalance:debug,2014-08-19T16:49:37.264,ns_1@10.242.238.90:<0.19145.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1023 [ns_server:debug,2014-08-19T16:49:37.305,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 760. Nacking mccouch update. [views:debug,2014-08-19T16:49:37.305,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/760. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:37.305,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",760,pending,0} [ns_server:debug,2014-08-19T16:49:37.305,ns_1@10.242.238.90:<0.19144.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_759_'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:49:37.305,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1016,1019,1022,762,765,1018,1021,761,764,767,1017,1020,760,1023,763] [rebalance:info,2014-08-19T16:49:37.306,ns_1@10.242.238.90:<0.19144.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[759]}, {checkpoints,[{759,0}]}, {name,<<"replication_building_759_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[759]}, {takeover,false}, {suffix,"building_759_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",759,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:37.307,ns_1@10.242.238.90:<0.19144.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19162.0> [rebalance:debug,2014-08-19T16:49:37.307,ns_1@10.242.238.90:<0.19144.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:37.307,ns_1@10.242.238.90:<0.19144.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26554.0>,#Ref<16550.0.1.22208>}]} [rebalance:info,2014-08-19T16:49:37.308,ns_1@10.242.238.90:<0.19144.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 759 [rebalance:debug,2014-08-19T16:49:37.308,ns_1@10.242.238.90:<0.19144.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26554.0>,#Ref<16550.0.1.22208>}] [ns_server:debug,2014-08-19T16:49:37.308,ns_1@10.242.238.90:<0.19144.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
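Editor's note: the capi_set_view_manager entries keep reporting a growing "Usable vbuckets" list as each set_vbucket event is processed. Diffing two consecutive snapshots shows which vbucket became usable in between; the helper below is an illustrative sketch for doing that, with both sample snapshots copied verbatim from this excerpt.

import re

def usable_set(entry):
    """Extract the vbucket ids from a 'Usable vbuckets: [...]' log entry."""
    m = re.search(r"Usable vbuckets:\s*\[([^\]]*)\]", entry, re.DOTALL)
    return {int(v) for v in m.group(1).replace("\n", "").split(",")} if m else set()

before = "Usable vbuckets: [766,1016,1019,1022,762,765,1018,1021,761,764,767,1017,1020,1023,763]"
after  = "Usable vbuckets: [766,1016,1019,1022,762,765,1018,1021,761,764,767,1017,1020,760,1023,763]"
print(sorted(usable_set(after) - usable_set(before)))   # -> [760]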
[ns_server:debug,2014-08-19T16:49:37.309,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19163.0> (ok) [rebalance:debug,2014-08-19T16:49:37.310,ns_1@10.242.238.90:<0.19164.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 759 [views:debug,2014-08-19T16:49:37.339,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/760. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:37.339,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",760,pending,0} [ns_server:debug,2014-08-19T16:49:37.422,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1015. Nacking mccouch update. [views:debug,2014-08-19T16:49:37.422,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1015. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:37.422,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1015,replica,0} [ns_server:debug,2014-08-19T16:49:37.422,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1016,1019,1022,762,765,1015,1018,1021,761,764,767,1017,1020,760,1023,763] [ns_server:info,2014-08-19T16:49:37.440,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1014 state to replica [ns_server:info,2014-08-19T16:49:37.443,ns_1@10.242.238.90:<0.19187.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1014 to state replica [views:debug,2014-08-19T16:49:37.457,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1015. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:37.457,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1015,replica,0} [ns_server:debug,2014-08-19T16:49:37.523,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 759. Nacking mccouch update. [views:debug,2014-08-19T16:49:37.523,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/759. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:37.523,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",759,pending,0} [ns_server:debug,2014-08-19T16:49:37.523,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1016,1019,759,1022,762,765,1015,1018,1021,761,764,767,1017,1020,760,1023, 763] [ns_server:debug,2014-08-19T16:49:37.533,ns_1@10.242.238.90:<0.19187.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1014_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:37.534,ns_1@10.242.238.90:<0.19187.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1014]}, {checkpoints,[{1014,0}]}, {name,<<"replication_building_1014_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1014]}, {takeover,false}, {suffix,"building_1014_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1014,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:37.535,ns_1@10.242.238.90:<0.19187.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19202.0> [rebalance:debug,2014-08-19T16:49:37.535,ns_1@10.242.238.90:<0.19187.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:37.535,ns_1@10.242.238.90:<0.19187.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26619.0>,#Ref<16550.0.1.22592>}]} [rebalance:info,2014-08-19T16:49:37.535,ns_1@10.242.238.90:<0.19187.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1014 [rebalance:debug,2014-08-19T16:49:37.536,ns_1@10.242.238.90:<0.19187.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26619.0>,#Ref<16550.0.1.22592>}] [ns_server:debug,2014-08-19T16:49:37.536,ns_1@10.242.238.90:<0.19187.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:37.556,ns_1@10.242.238.90:<0.19203.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1014 [ns_server:info,2014-08-19T16:49:37.562,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 758 state to replica [views:debug,2014-08-19T16:49:37.563,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/759. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:37.563,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",759,pending,0} [rebalance:debug,2014-08-19T16:49:37.564,ns_1@10.242.238.90:<0.19108.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:37.564,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19108.0> (ok) [ns_server:info,2014-08-19T16:49:37.570,ns_1@10.242.238.90:<0.19206.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 758 to state replica [ns_server:debug,2014-08-19T16:49:37.676,ns_1@10.242.238.90:<0.19206.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_758_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:37.678,ns_1@10.242.238.90:<0.19206.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[758]}, {checkpoints,[{758,0}]}, {name,<<"replication_building_758_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[758]}, {takeover,false}, {suffix,"building_758_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",758,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:37.678,ns_1@10.242.238.90:<0.19206.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19221.0> [rebalance:debug,2014-08-19T16:49:37.678,ns_1@10.242.238.90:<0.19206.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:37.679,ns_1@10.242.238.90:<0.19206.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26653.0>,#Ref<16550.0.1.23028>}]} [rebalance:info,2014-08-19T16:49:37.679,ns_1@10.242.238.90:<0.19206.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 758 [rebalance:debug,2014-08-19T16:49:37.679,ns_1@10.242.238.90:<0.19206.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26653.0>,#Ref<16550.0.1.23028>}] [ns_server:debug,2014-08-19T16:49:37.680,ns_1@10.242.238.90:<0.19206.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:49:37.680,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19222.0> (ok) [rebalance:debug,2014-08-19T16:49:37.682,ns_1@10.242.238.90:<0.19223.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 758 [ns_server:debug,2014-08-19T16:49:37.721,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1014. Nacking mccouch update. [views:debug,2014-08-19T16:49:37.722,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1014. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:37.722,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1014,replica,0} [ns_server:debug,2014-08-19T16:49:37.722,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1016,1019,759,1022,762,765,1015,1018,1021,761,764,767,1014,1017,1020,760, 1023,763] [views:debug,2014-08-19T16:49:37.788,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1014. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:37.789,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1014,replica,0} [ns_server:info,2014-08-19T16:49:37.813,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1013 state to replica [ns_server:info,2014-08-19T16:49:37.817,ns_1@10.242.238.90:<0.19227.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1013 to state replica [ns_server:debug,2014-08-19T16:49:37.907,ns_1@10.242.238.90:<0.19227.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1013_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:37.908,ns_1@10.242.238.90:<0.19227.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1013]}, {checkpoints,[{1013,0}]}, {name,<<"replication_building_1013_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1013]}, {takeover,false}, {suffix,"building_1013_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1013,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:37.909,ns_1@10.242.238.90:<0.19227.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19228.0> [rebalance:debug,2014-08-19T16:49:37.909,ns_1@10.242.238.90:<0.19227.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:37.910,ns_1@10.242.238.90:<0.19227.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26724.0>,#Ref<16550.0.1.24070>}]} [rebalance:info,2014-08-19T16:49:37.910,ns_1@10.242.238.90:<0.19227.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1013 [rebalance:debug,2014-08-19T16:49:37.910,ns_1@10.242.238.90:<0.19227.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26724.0>,#Ref<16550.0.1.24070>}] [ns_server:debug,2014-08-19T16:49:37.911,ns_1@10.242.238.90:<0.19227.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:37.932,ns_1@10.242.238.90:<0.19232.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1013 [ns_server:info,2014-08-19T16:49:37.938,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 757 state to replica [ns_server:info,2014-08-19T16:49:37.945,ns_1@10.242.238.90:<0.19246.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 757 to state replica [ns_server:debug,2014-08-19T16:49:38.007,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 758. Nacking mccouch update. 
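Editor's note: the mc_connection entries record every acknowledged vbucket update as an event of the shape {set_vbucket, Bucket, VBucket, State, Checkpoint}. A quick tally of those signalled events by resulting state can summarise an excerpt like this one; the parsing below is our own illustrative aid, not Couchbase tooling, and the sample strings are copied from entries above.

import re
from collections import Counter

# The signalled events above look like
#   Signaled mc_couch_event: {set_vbucket,"default",765,pending,1}
SET_VB = re.compile(r'\{set_vbucket,"([^"]+)",(\d+),(\w+),(\d+)\}')

def tally_states(entries):
    """Count signalled set_vbucket events per resulting state."""
    counts = Counter()
    for entry in entries:
        m = SET_VB.search(entry)
        if m:
            counts[m.group(3)] += 1
    return counts

sample = [
    'Signaled mc_couch_event: {set_vbucket,"default",765,pending,1}',
    'Signaled mc_couch_event: {set_vbucket,"default",1020,replica,0}',
]
print(tally_states(sample))   # -> Counter({'pending': 1, 'replica': 1})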
[views:debug,2014-08-19T16:49:38.007,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/758. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:38.007,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",758,replica,0} [ns_server:debug,2014-08-19T16:49:38.008,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1016,1019,759,1022,762,765,1015,1018,758,1021,761,764,767,1014,1017,1020, 760,1023,763] [ns_server:debug,2014-08-19T16:49:38.051,ns_1@10.242.238.90:<0.19246.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_757_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:38.052,ns_1@10.242.238.90:<0.19246.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[757]}, {checkpoints,[{757,0}]}, {name,<<"replication_building_757_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[757]}, {takeover,false}, {suffix,"building_757_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",757,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:38.052,ns_1@10.242.238.90:<0.19246.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19247.0> [rebalance:debug,2014-08-19T16:49:38.053,ns_1@10.242.238.90:<0.19246.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:38.053,ns_1@10.242.238.90:<0.19246.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26789.0>,#Ref<16550.0.1.24642>}]} [rebalance:info,2014-08-19T16:49:38.053,ns_1@10.242.238.90:<0.19246.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 757 [rebalance:debug,2014-08-19T16:49:38.053,ns_1@10.242.238.90:<0.19246.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26789.0>,#Ref<16550.0.1.24642>}] [ns_server:debug,2014-08-19T16:49:38.054,ns_1@10.242.238.90:<0.19246.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:49:38.054,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19248.0> (ok) [rebalance:debug,2014-08-19T16:49:38.056,ns_1@10.242.238.90:<0.19249.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 757 [views:debug,2014-08-19T16:49:38.074,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/758. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:38.075,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",758,replica,0} [ns_server:info,2014-08-19T16:49:38.188,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1012 state to replica [ns_server:info,2014-08-19T16:49:38.193,ns_1@10.242.238.90:<0.19266.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1012 to state replica [ns_server:debug,2014-08-19T16:49:38.233,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1013. Nacking mccouch update. 
[views:debug,2014-08-19T16:49:38.233,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1013. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:38.233,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1013,replica,0} [ns_server:debug,2014-08-19T16:49:38.233,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,1016,1019,759,1022,762,765,1015,1018,758,1021,761,764,767,1014,1017, 1020,760,1023,763] [ns_server:debug,2014-08-19T16:49:38.283,ns_1@10.242.238.90:<0.19266.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1012_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:38.285,ns_1@10.242.238.90:<0.19266.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1012]}, {checkpoints,[{1012,0}]}, {name,<<"replication_building_1012_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1012]}, {takeover,false}, {suffix,"building_1012_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1012,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:38.285,ns_1@10.242.238.90:<0.19266.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19267.0> [rebalance:debug,2014-08-19T16:49:38.285,ns_1@10.242.238.90:<0.19266.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:38.286,ns_1@10.242.238.90:<0.19266.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26846.0>,#Ref<16550.0.1.24941>}]} [rebalance:info,2014-08-19T16:49:38.286,ns_1@10.242.238.90:<0.19266.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1012 [rebalance:debug,2014-08-19T16:49:38.286,ns_1@10.242.238.90:<0.19266.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26846.0>,#Ref<16550.0.1.24941>}] [ns_server:debug,2014-08-19T16:49:38.287,ns_1@10.242.238.90:<0.19266.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:38.306,ns_1@10.242.238.90:<0.19268.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1012 [ns_server:info,2014-08-19T16:49:38.313,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 756 state to replica [views:debug,2014-08-19T16:49:38.317,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1013. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:38.317,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1013,replica,0} [ns_server:info,2014-08-19T16:49:38.318,ns_1@10.242.238.90:<0.19271.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 756 to state replica [ns_server:info,2014-08-19T16:49:38.342,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.90': ["default"] [ns_server:debug,2014-08-19T16:49:38.426,ns_1@10.242.238.90:<0.19271.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_756_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:38.427,ns_1@10.242.238.90:<0.19271.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[756]}, {checkpoints,[{756,0}]}, {name,<<"replication_building_756_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[756]}, {takeover,false}, {suffix,"building_756_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",756,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:38.428,ns_1@10.242.238.90:<0.19271.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19294.0> [rebalance:debug,2014-08-19T16:49:38.428,ns_1@10.242.238.90:<0.19271.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:38.428,ns_1@10.242.238.90:<0.19271.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26866.0>,#Ref<16550.0.1.25075>}]} [rebalance:info,2014-08-19T16:49:38.428,ns_1@10.242.238.90:<0.19271.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 756 [rebalance:debug,2014-08-19T16:49:38.429,ns_1@10.242.238.90:<0.19271.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26866.0>,#Ref<16550.0.1.25075>}] [ns_server:debug,2014-08-19T16:49:38.429,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19295.0> (ok) [ns_server:debug,2014-08-19T16:49:38.429,ns_1@10.242.238.90:<0.19271.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:38.431,ns_1@10.242.238.90:<0.19298.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 756 [ns_server:debug,2014-08-19T16:49:38.475,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 757. Nacking mccouch update. [views:debug,2014-08-19T16:49:38.476,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/757. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:38.476,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,1016,1019,759,1022,762,765,1015,1018,758,1021,761,764,767,1014,1017, 757,1020,760,1023,763] [ns_server:debug,2014-08-19T16:49:38.476,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",757,pending,0} [views:debug,2014-08-19T16:49:38.548,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/757. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:38.548,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",757,pending,0} [ns_server:info,2014-08-19T16:49:38.562,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1011 state to replica [ns_server:info,2014-08-19T16:49:38.566,ns_1@10.242.238.90:<0.19301.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1011 to state replica [ns_server:debug,2014-08-19T16:49:38.616,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1012. Nacking mccouch update. [views:debug,2014-08-19T16:49:38.617,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1012. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:38.617,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1012,replica,0} [ns_server:debug,2014-08-19T16:49:38.617,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,1016,1019,759,1022,762,765,1012,1015,1018,758,1021,761,764,767,1014, 1017,757,1020,760,1023,763] [views:debug,2014-08-19T16:49:38.650,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1012. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:38.651,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1012,replica,0} [ns_server:debug,2014-08-19T16:49:38.656,ns_1@10.242.238.90:<0.19301.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1011_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:38.658,ns_1@10.242.238.90:<0.19301.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1011]}, {checkpoints,[{1011,0}]}, {name,<<"replication_building_1011_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1011]}, {takeover,false}, {suffix,"building_1011_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1011,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:38.659,ns_1@10.242.238.90:<0.19301.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19316.0> [rebalance:debug,2014-08-19T16:49:38.659,ns_1@10.242.238.90:<0.19301.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:38.659,ns_1@10.242.238.90:<0.19301.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26924.0>,#Ref<16550.0.1.25407>}]} [rebalance:info,2014-08-19T16:49:38.659,ns_1@10.242.238.90:<0.19301.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1011 [rebalance:debug,2014-08-19T16:49:38.660,ns_1@10.242.238.90:<0.19301.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26924.0>,#Ref<16550.0.1.25407>}] [ns_server:debug,2014-08-19T16:49:38.660,ns_1@10.242.238.90:<0.19301.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:38.679,ns_1@10.242.238.90:<0.19317.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1011 [views:debug,2014-08-19T16:49:38.684,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/758. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:38.684,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",758,pending,0} [ns_server:info,2014-08-19T16:49:38.685,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 755 state to replica [ns_server:info,2014-08-19T16:49:38.692,ns_1@10.242.238.90:<0.19320.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 755 to state replica [ns_server:debug,2014-08-19T16:49:38.751,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 756. Nacking mccouch update. [views:debug,2014-08-19T16:49:38.751,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/756. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:38.751,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",756,pending,0} [ns_server:debug,2014-08-19T16:49:38.751,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,1016,756,1019,759,1022,762,765,1012,1015,1018,758,1021,761,764,767, 1014,1017,757,1020,760,1023,763] [views:debug,2014-08-19T16:49:38.784,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/756. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:38.785,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",756,pending,0} [ns_server:debug,2014-08-19T16:49:38.798,ns_1@10.242.238.90:<0.19320.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_755_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:38.800,ns_1@10.242.238.90:<0.19320.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[755]}, {checkpoints,[{755,0}]}, {name,<<"replication_building_755_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[755]}, {takeover,false}, {suffix,"building_755_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",755,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:38.800,ns_1@10.242.238.90:<0.19320.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19335.0> [rebalance:debug,2014-08-19T16:49:38.800,ns_1@10.242.238.90:<0.19320.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:38.801,ns_1@10.242.238.90:<0.19320.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26958.0>,#Ref<16550.0.1.25573>}]} [rebalance:info,2014-08-19T16:49:38.801,ns_1@10.242.238.90:<0.19320.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 755 [rebalance:debug,2014-08-19T16:49:38.801,ns_1@10.242.238.90:<0.19320.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26958.0>,#Ref<16550.0.1.25573>}] [ns_server:debug,2014-08-19T16:49:38.802,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19336.0> (ok) [ns_server:debug,2014-08-19T16:49:38.802,ns_1@10.242.238.90:<0.19320.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:38.804,ns_1@10.242.238.90:<0.19337.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 755 [ns_server:debug,2014-08-19T16:49:38.868,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1011. Nacking mccouch update. [views:debug,2014-08-19T16:49:38.868,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1011. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:38.869,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,1016,756,1019,759,1022,762,765,1012,1015,1018,758,1021,761,764,1011, 767,1014,1017,757,1020,760,1023,763] [ns_server:debug,2014-08-19T16:49:38.869,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1011,replica,0} [views:debug,2014-08-19T16:49:38.902,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1011. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:38.902,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1011,replica,0} [ns_server:info,2014-08-19T16:49:38.936,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1010 state to replica [ns_server:info,2014-08-19T16:49:38.940,ns_1@10.242.238.90:<0.19354.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1010 to state replica [ns_server:debug,2014-08-19T16:49:38.986,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 755. Nacking mccouch update. [views:debug,2014-08-19T16:49:38.986,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/755. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:38.986,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",755,pending,0} [ns_server:debug,2014-08-19T16:49:38.986,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,1016,756,1019,759,1022,762,765,1012,1015,755,1018,758,1021,761,764, 1011,767,1014,1017,757,1020,760,1023,763] [views:debug,2014-08-19T16:49:39.026,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/755. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:39.026,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",755,pending,0} [rebalance:debug,2014-08-19T16:49:39.027,ns_1@10.242.238.90:<0.19057.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:39.027,ns_1@10.242.238.90:<0.19337.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:39.027,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19057.0> (ok) [ns_server:debug,2014-08-19T16:49:39.027,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19337.0> (ok) [ns_server:debug,2014-08-19T16:49:39.030,ns_1@10.242.238.90:<0.19354.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1010_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:39.031,ns_1@10.242.238.90:<0.19354.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1010]}, {checkpoints,[{1010,0}]}, {name,<<"replication_building_1010_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1010]}, {takeover,false}, {suffix,"building_1010_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1010,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:39.032,ns_1@10.242.238.90:<0.19354.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19369.0> [rebalance:debug,2014-08-19T16:49:39.032,ns_1@10.242.238.90:<0.19354.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:39.032,ns_1@10.242.238.90:<0.19354.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27015.0>,#Ref<16550.0.1.25863>}]} [rebalance:info,2014-08-19T16:49:39.032,ns_1@10.242.238.90:<0.19354.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1010 [rebalance:debug,2014-08-19T16:49:39.033,ns_1@10.242.238.90:<0.19354.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27015.0>,#Ref<16550.0.1.25863>}] [ns_server:debug,2014-08-19T16:49:39.034,ns_1@10.242.238.90:<0.19354.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:39.053,ns_1@10.242.238.90:<0.19370.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1010 [ns_server:info,2014-08-19T16:49:39.059,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 754 state to replica [ns_server:info,2014-08-19T16:49:39.068,ns_1@10.242.238.90:<0.19373.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 754 to state replica [ns_server:debug,2014-08-19T16:49:39.174,ns_1@10.242.238.90:<0.19373.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_754_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:39.175,ns_1@10.242.238.90:<0.19373.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[754]}, {checkpoints,[{754,0}]}, {name,<<"replication_building_754_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[754]}, {takeover,false}, {suffix,"building_754_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",754,'ns_1@10.242.238.88', 
'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:39.176,ns_1@10.242.238.90:<0.19373.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19388.0> [rebalance:debug,2014-08-19T16:49:39.176,ns_1@10.242.238.90:<0.19373.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:39.177,ns_1@10.242.238.90:<0.19373.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27049.0>,#Ref<16550.0.1.26050>}]} [rebalance:info,2014-08-19T16:49:39.177,ns_1@10.242.238.90:<0.19373.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 754 [rebalance:debug,2014-08-19T16:49:39.177,ns_1@10.242.238.90:<0.19373.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27049.0>,#Ref<16550.0.1.26050>}] [ns_server:debug,2014-08-19T16:49:39.178,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19389.0> (ok) [ns_server:debug,2014-08-19T16:49:39.178,ns_1@10.242.238.90:<0.19373.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:39.179,ns_1@10.242.238.90:<0.19390.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 754 [ns_server:debug,2014-08-19T16:49:39.184,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1010. Nacking mccouch update. [views:debug,2014-08-19T16:49:39.185,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1010. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:39.185,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1010,replica,0} [ns_server:debug,2014-08-19T16:49:39.185,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,1016,756,1019,759,1022,762,765,1012,1015,755,1018,758,1021,761,764, 1011,767,1014,1017,757,1020,760,1023,763,1010] [views:debug,2014-08-19T16:49:39.260,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1010. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:39.260,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1010,replica,0} [ns_server:info,2014-08-19T16:49:39.310,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1009 state to replica [ns_server:info,2014-08-19T16:49:39.314,ns_1@10.242.238.90:<0.19393.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1009 to state replica [ns_server:debug,2014-08-19T16:49:39.404,ns_1@10.242.238.90:<0.19393.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1009_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:39.405,ns_1@10.242.238.90:<0.19393.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1009]}, {checkpoints,[{1009,0}]}, {name,<<"replication_building_1009_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1009]}, {takeover,false}, {suffix,"building_1009_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1009,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:39.406,ns_1@10.242.238.90:<0.19393.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19414.0> [rebalance:debug,2014-08-19T16:49:39.406,ns_1@10.242.238.90:<0.19393.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:39.406,ns_1@10.242.238.90:<0.19393.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27125.0>,#Ref<16550.0.1.26484>}]} [rebalance:info,2014-08-19T16:49:39.407,ns_1@10.242.238.90:<0.19393.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1009 [rebalance:debug,2014-08-19T16:49:39.407,ns_1@10.242.238.90:<0.19393.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27125.0>,#Ref<16550.0.1.26484>}] [ns_server:debug,2014-08-19T16:49:39.408,ns_1@10.242.238.90:<0.19393.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:39.427,ns_1@10.242.238.90:<0.19415.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1009 [ns_server:info,2014-08-19T16:49:39.433,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 753 state to replica [ns_server:info,2014-08-19T16:49:39.441,ns_1@10.242.238.90:<0.19419.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 753 to state replica [ns_server:debug,2014-08-19T16:49:39.486,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1009. Nacking mccouch update. [views:debug,2014-08-19T16:49:39.486,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1009. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:39.486,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,1016,756,1019,759,1022,762,1009,765,1012,1015,755,1018,758,1021,761, 764,1011,767,1014,1017,757,1020,760,1023,763,1010] [ns_server:debug,2014-08-19T16:49:39.487,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1009,replica,0} [ns_server:debug,2014-08-19T16:49:39.546,ns_1@10.242.238.90:<0.19419.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_753_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:39.548,ns_1@10.242.238.90:<0.19419.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[753]}, {checkpoints,[{753,0}]}, {name,<<"replication_building_753_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[753]}, {takeover,false}, {suffix,"building_753_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",753,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:39.548,ns_1@10.242.238.90:<0.19419.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19420.0> [rebalance:debug,2014-08-19T16:49:39.548,ns_1@10.242.238.90:<0.19419.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:39.549,ns_1@10.242.238.90:<0.19419.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27159.0>,#Ref<16550.0.1.26654>}]} [rebalance:info,2014-08-19T16:49:39.549,ns_1@10.242.238.90:<0.19419.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 753 [rebalance:debug,2014-08-19T16:49:39.549,ns_1@10.242.238.90:<0.19419.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27159.0>,#Ref<16550.0.1.26654>}] [ns_server:debug,2014-08-19T16:49:39.550,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19421.0> (ok) [ns_server:debug,2014-08-19T16:49:39.550,ns_1@10.242.238.90:<0.19419.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:39.552,ns_1@10.242.238.90:<0.19422.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 753 [views:debug,2014-08-19T16:49:39.553,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1009. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:39.553,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1009,replica,0} [ns_server:info,2014-08-19T16:49:39.685,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1008 state to replica [ns_server:info,2014-08-19T16:49:39.688,ns_1@10.242.238.90:<0.19430.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1008 to state replica [ns_server:debug,2014-08-19T16:49:39.770,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 754. Nacking mccouch update. [views:debug,2014-08-19T16:49:39.770,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/754. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:39.770,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",754,pending,0} [ns_server:debug,2014-08-19T16:49:39.770,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,1016,756,1019,759,1022,762,1009,765,1012,1015,755,1018,758,1021,761, 764,1011,767,1014,754,1017,757,1020,760,1023,763,1010] [ns_server:debug,2014-08-19T16:49:39.778,ns_1@10.242.238.90:<0.19430.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1008_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:39.779,ns_1@10.242.238.90:<0.19430.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1008]}, {checkpoints,[{1008,0}]}, {name,<<"replication_building_1008_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1008]}, {takeover,false}, {suffix,"building_1008_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1008,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:39.780,ns_1@10.242.238.90:<0.19430.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19440.0> [rebalance:debug,2014-08-19T16:49:39.780,ns_1@10.242.238.90:<0.19430.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:39.781,ns_1@10.242.238.90:<0.19430.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27216.0>,#Ref<16550.0.1.26969>}]} [rebalance:info,2014-08-19T16:49:39.781,ns_1@10.242.238.90:<0.19430.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1008 [rebalance:debug,2014-08-19T16:49:39.781,ns_1@10.242.238.90:<0.19430.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27216.0>,#Ref<16550.0.1.26969>}] [ns_server:debug,2014-08-19T16:49:39.782,ns_1@10.242.238.90:<0.19430.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:39.802,ns_1@10.242.238.90:<0.19441.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1008 [ns_server:info,2014-08-19T16:49:39.807,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 752 state to replica [ns_server:info,2014-08-19T16:49:39.814,ns_1@10.242.238.90:<0.19444.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 752 to state replica [views:debug,2014-08-19T16:49:39.837,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/754. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:39.838,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",754,pending,0} [ns_server:debug,2014-08-19T16:49:39.919,ns_1@10.242.238.90:<0.19444.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_752_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:39.920,ns_1@10.242.238.90:<0.19444.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[752]}, {checkpoints,[{752,0}]}, {name,<<"replication_building_752_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[752]}, {takeover,false}, {suffix,"building_752_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",752,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:39.921,ns_1@10.242.238.90:<0.19444.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19459.0> [rebalance:debug,2014-08-19T16:49:39.921,ns_1@10.242.238.90:<0.19444.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:39.921,ns_1@10.242.238.90:<0.19444.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27250.0>,#Ref<16550.0.1.27140>}]} [rebalance:info,2014-08-19T16:49:39.921,ns_1@10.242.238.90:<0.19444.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 752 [rebalance:debug,2014-08-19T16:49:39.922,ns_1@10.242.238.90:<0.19444.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27250.0>,#Ref<16550.0.1.27140>}] [ns_server:debug,2014-08-19T16:49:39.922,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19460.0> (ok) [ns_server:debug,2014-08-19T16:49:39.922,ns_1@10.242.238.90:<0.19444.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:39.924,ns_1@10.242.238.90:<0.19461.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 752 [ns_server:debug,2014-08-19T16:49:39.996,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1008. Nacking mccouch update. [views:debug,2014-08-19T16:49:39.996,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1008. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:39.996,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1008,replica,0} [ns_server:debug,2014-08-19T16:49:39.996,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,1016,756,1019,759,1022,762,1009,765,1012,1015,755,1018,758,1021,761, 1008,764,1011,767,1014,754,1017,757,1020,760,1023,763,1010] [views:debug,2014-08-19T16:49:40.045,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1008. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:40.045,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1008,replica,0} [ns_server:info,2014-08-19T16:49:40.056,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1007 state to replica [ns_server:info,2014-08-19T16:49:40.060,ns_1@10.242.238.90:<0.19464.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1007 to state replica [ns_server:debug,2014-08-19T16:49:40.120,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 752. Nacking mccouch update. [views:debug,2014-08-19T16:49:40.120,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/752. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:40.121,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",752,pending,0} [ns_server:debug,2014-08-19T16:49:40.121,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,1016,756,1019,759,1022,762,1009,765,1012,752,1015,755,1018,758,1021, 761,1008,764,1011,767,1014,754,1017,757,1020,760,1023,763,1010] [ns_server:debug,2014-08-19T16:49:40.150,ns_1@10.242.238.90:<0.19464.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1007_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:40.152,ns_1@10.242.238.90:<0.19464.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1007]}, {checkpoints,[{1007,0}]}, {name,<<"replication_building_1007_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1007]}, {takeover,false}, {suffix,"building_1007_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1007,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:40.153,ns_1@10.242.238.90:<0.19464.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19479.0> [rebalance:debug,2014-08-19T16:49:40.153,ns_1@10.242.238.90:<0.19464.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:40.153,ns_1@10.242.238.90:<0.19464.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27307.0>,#Ref<16550.0.1.27427>}]} [rebalance:info,2014-08-19T16:49:40.154,ns_1@10.242.238.90:<0.19464.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1007 [rebalance:debug,2014-08-19T16:49:40.154,ns_1@10.242.238.90:<0.19464.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27307.0>,#Ref<16550.0.1.27427>}] [views:debug,2014-08-19T16:49:40.154,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/752. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:40.154,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",752,pending,0} [ns_server:debug,2014-08-19T16:49:40.155,ns_1@10.242.238.90:<0.19464.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:40.174,ns_1@10.242.238.90:<0.19480.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1007 [ns_server:info,2014-08-19T16:49:40.180,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 751 state to replica [ns_server:info,2014-08-19T16:49:40.187,ns_1@10.242.238.90:<0.19483.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 751 to state replica [ns_server:debug,2014-08-19T16:49:40.229,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1007. Nacking mccouch update. [views:debug,2014-08-19T16:49:40.229,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1007. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:40.230,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1007,replica,0} [ns_server:debug,2014-08-19T16:49:40.230,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,1016,756,1019,759,1022,762,1009,765,1012,752,1015,755,1018,758,1021, 761,1008,764,1011,767,1014,754,1017,757,1020,760,1023,1007,763,1010] [views:debug,2014-08-19T16:49:40.263,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1007. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:40.263,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1007,replica,0} [ns_server:debug,2014-08-19T16:49:40.295,ns_1@10.242.238.90:<0.19483.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_751_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:40.296,ns_1@10.242.238.90:<0.19483.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[751]}, {checkpoints,[{751,0}]}, {name,<<"replication_building_751_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[751]}, {takeover,false}, {suffix,"building_751_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",751,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:40.297,ns_1@10.242.238.90:<0.19483.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19498.0> [rebalance:debug,2014-08-19T16:49:40.297,ns_1@10.242.238.90:<0.19483.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:40.298,ns_1@10.242.238.90:<0.19483.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27327.0>,#Ref<16550.0.1.27579>}]} [rebalance:info,2014-08-19T16:49:40.298,ns_1@10.242.238.90:<0.19483.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 751 [rebalance:debug,2014-08-19T16:49:40.298,ns_1@10.242.238.90:<0.19483.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27327.0>,#Ref<16550.0.1.27579>}] [ns_server:debug,2014-08-19T16:49:40.299,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19499.0> (ok) [ns_server:debug,2014-08-19T16:49:40.299,ns_1@10.242.238.90:<0.19483.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:40.300,ns_1@10.242.238.90:<0.19500.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 751 [ns_server:debug,2014-08-19T16:49:40.338,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 753. Nacking mccouch update. [views:debug,2014-08-19T16:49:40.339,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/753. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:40.339,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",753,pending,0} [ns_server:debug,2014-08-19T16:49:40.339,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,753,1016,756,1019,759,1022,762,1009,765,1012,752,1015,755,1018,758, 1021,761,1008,764,1011,767,1014,754,1017,757,1020,760,1023,1007,763,1010] [views:debug,2014-08-19T16:49:40.397,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/753. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:40.397,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",753,pending,0} [rebalance:debug,2014-08-19T16:49:40.398,ns_1@10.242.238.90:<0.19203.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:40.398,ns_1@10.242.238.90:<0.19249.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:40.398,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19203.0> (ok) [ns_server:debug,2014-08-19T16:49:40.398,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19249.0> (ok) [ns_server:info,2014-08-19T16:49:40.432,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1006 state to replica [ns_server:info,2014-08-19T16:49:40.438,ns_1@10.242.238.90:<0.19517.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1006 to state replica [ns_server:debug,2014-08-19T16:49:40.529,ns_1@10.242.238.90:<0.19517.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1006_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:40.530,ns_1@10.242.238.90:<0.19517.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1006]}, {checkpoints,[{1006,0}]}, {name,<<"replication_building_1006_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1006]}, {takeover,false}, {suffix,"building_1006_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1006,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:40.531,ns_1@10.242.238.90:<0.19517.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19532.0> [rebalance:debug,2014-08-19T16:49:40.531,ns_1@10.242.238.90:<0.19517.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:40.532,ns_1@10.242.238.90:<0.19517.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27384.0>,#Ref<16550.0.1.27882>}]} [rebalance:info,2014-08-19T16:49:40.532,ns_1@10.242.238.90:<0.19517.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1006 [rebalance:debug,2014-08-19T16:49:40.532,ns_1@10.242.238.90:<0.19517.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27384.0>,#Ref<16550.0.1.27882>}] [ns_server:debug,2014-08-19T16:49:40.533,ns_1@10.242.238.90:<0.19517.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:49:40.548,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 751. Nacking mccouch update. [views:debug,2014-08-19T16:49:40.548,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/751. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:40.548,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,753,1016,756,1019,759,1022,762,1009,765,1012,752,1015,755,1018,758, 1021,761,1008,764,1011,767,751,1014,754,1017,757,1020,760,1023,1007,763,1010] [ns_server:debug,2014-08-19T16:49:40.548,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",751,pending,0} [rebalance:debug,2014-08-19T16:49:40.554,ns_1@10.242.238.90:<0.19533.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1006 [ns_server:info,2014-08-19T16:49:40.560,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 750 state to replica [ns_server:info,2014-08-19T16:49:40.565,ns_1@10.242.238.90:<0.19536.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 750 to state replica [views:debug,2014-08-19T16:49:40.621,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/751. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:40.621,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",751,pending,0} [ns_server:debug,2014-08-19T16:49:40.672,ns_1@10.242.238.90:<0.19536.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_750_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:40.673,ns_1@10.242.238.90:<0.19536.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[750]}, {checkpoints,[{750,0}]}, {name,<<"replication_building_750_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[750]}, {takeover,false}, {suffix,"building_750_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",750,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:40.674,ns_1@10.242.238.90:<0.19536.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19537.0> [rebalance:debug,2014-08-19T16:49:40.674,ns_1@10.242.238.90:<0.19536.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:40.674,ns_1@10.242.238.90:<0.19536.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27418.0>,#Ref<16550.0.1.28074>}]} [rebalance:info,2014-08-19T16:49:40.675,ns_1@10.242.238.90:<0.19536.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 750 [rebalance:debug,2014-08-19T16:49:40.675,ns_1@10.242.238.90:<0.19536.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27418.0>,#Ref<16550.0.1.28074>}] [ns_server:debug,2014-08-19T16:49:40.676,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19538.0> (ok) [ns_server:debug,2014-08-19T16:49:40.676,ns_1@10.242.238.90:<0.19536.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:40.677,ns_1@10.242.238.90:<0.19539.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 750 [ns_server:info,2014-08-19T16:49:40.809,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1005 state to replica 
[ns_server:info,2014-08-19T16:49:40.814,ns_1@10.242.238.90:<0.19556.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1005 to state replica [ns_server:debug,2014-08-19T16:49:40.863,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1006. Nacking mccouch update. [views:debug,2014-08-19T16:49:40.864,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1006. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:40.864,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1006,replica,0} [ns_server:debug,2014-08-19T16:49:40.864,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,1013,753,1016,756,1019,759,1022,1006,762,1009,765,1012,752,1015,755,1018, 758,1021,761,1008,764,1011,767,751,1014,754,1017,757,1020,760,1023,1007,763, 1010] [views:debug,2014-08-19T16:49:40.897,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1006. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:40.898,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1006,replica,0} [ns_server:debug,2014-08-19T16:49:40.905,ns_1@10.242.238.90:<0.19556.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1005_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:40.906,ns_1@10.242.238.90:<0.19556.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1005]}, {checkpoints,[{1005,0}]}, {name,<<"replication_building_1005_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1005]}, {takeover,false}, {suffix,"building_1005_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1005,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:40.907,ns_1@10.242.238.90:<0.19556.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19557.0> [rebalance:debug,2014-08-19T16:49:40.907,ns_1@10.242.238.90:<0.19556.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:40.907,ns_1@10.242.238.90:<0.19556.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27489.0>,#Ref<16550.0.1.28458>}]} [rebalance:info,2014-08-19T16:49:40.907,ns_1@10.242.238.90:<0.19556.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1005 [rebalance:debug,2014-08-19T16:49:40.908,ns_1@10.242.238.90:<0.19556.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27489.0>,#Ref<16550.0.1.28458>}] [ns_server:debug,2014-08-19T16:49:40.909,ns_1@10.242.238.90:<0.19556.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:40.930,ns_1@10.242.238.90:<0.19558.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1005 [ns_server:info,2014-08-19T16:49:40.936,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 749 state to replica [ns_server:info,2014-08-19T16:49:40.945,ns_1@10.242.238.90:<0.19561.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} 
vbucket 749 to state replica [ns_server:debug,2014-08-19T16:49:40.981,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 750. Nacking mccouch update. [views:debug,2014-08-19T16:49:40.981,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/750. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:40.981,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",750,pending,0} [ns_server:debug,2014-08-19T16:49:40.981,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,756,1019,759,1022,1006,762,1009,765,1012,752,1015,755, 1018,758,1021,761,1008,764,1011,767,751,1014,754,1017,757,1020,760,1023,1007, 763,1010] [views:debug,2014-08-19T16:49:41.016,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/750. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:41.017,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",750,pending,0} [ns_server:debug,2014-08-19T16:49:41.050,ns_1@10.242.238.90:<0.19561.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_749_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:41.051,ns_1@10.242.238.90:<0.19561.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[749]}, {checkpoints,[{749,0}]}, {name,<<"replication_building_749_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[749]}, {takeover,false}, {suffix,"building_749_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",749,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:41.052,ns_1@10.242.238.90:<0.19561.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19576.0> [rebalance:debug,2014-08-19T16:49:41.052,ns_1@10.242.238.90:<0.19561.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:41.052,ns_1@10.242.238.90:<0.19561.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27523.0>,#Ref<16550.0.1.28647>}]} [rebalance:info,2014-08-19T16:49:41.053,ns_1@10.242.238.90:<0.19561.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 749 [rebalance:debug,2014-08-19T16:49:41.053,ns_1@10.242.238.90:<0.19561.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27523.0>,#Ref<16550.0.1.28647>}] [ns_server:debug,2014-08-19T16:49:41.053,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19577.0> (ok) [ns_server:debug,2014-08-19T16:49:41.054,ns_1@10.242.238.90:<0.19561.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:41.055,ns_1@10.242.238.90:<0.19578.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 749 [ns_server:debug,2014-08-19T16:49:41.100,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1005. Nacking mccouch update. 
[views:debug,2014-08-19T16:49:41.100,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1005. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:41.100,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,756,1019,759,1022,1006,762,1009,765,1012,752,1015,755, 1018,758,1021,1005,761,1008,764,1011,767,751,1014,754,1017,757,1020,760,1023, 1007,763,1010] [ns_server:debug,2014-08-19T16:49:41.100,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1005,replica,0} [views:debug,2014-08-19T16:49:41.134,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1005. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:41.134,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1005,replica,0} [ns_server:info,2014-08-19T16:49:41.189,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1004 state to replica [ns_server:info,2014-08-19T16:49:41.192,ns_1@10.242.238.90:<0.19609.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1004 to state replica [ns_server:debug,2014-08-19T16:49:41.217,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 749. Nacking mccouch update. [views:debug,2014-08-19T16:49:41.217,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/749. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:41.217,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",749,pending,0} [ns_server:debug,2014-08-19T16:49:41.218,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,756,1019,759,1022,1006,762,1009,765,749,1012,752,1015, 755,1018,758,1021,1005,761,1008,764,1011,767,751,1014,754,1017,757,1020,760, 1023,1007,763,1010] [ns_server:debug,2014-08-19T16:49:41.283,ns_1@10.242.238.90:<0.19609.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1004_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:41.284,ns_1@10.242.238.90:<0.19609.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1004]}, {checkpoints,[{1004,0}]}, {name,<<"replication_building_1004_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1004]}, {takeover,false}, {suffix,"building_1004_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1004,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:41.285,ns_1@10.242.238.90:<0.19609.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19610.0> [rebalance:debug,2014-08-19T16:49:41.285,ns_1@10.242.238.90:<0.19609.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:41.285,ns_1@10.242.238.90:<0.19609.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27599.0>,#Ref<16550.0.1.29026>}]} 
[rebalance:info,2014-08-19T16:49:41.286,ns_1@10.242.238.90:<0.19609.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1004 [rebalance:debug,2014-08-19T16:49:41.286,ns_1@10.242.238.90:<0.19609.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27599.0>,#Ref<16550.0.1.29026>}] [ns_server:debug,2014-08-19T16:49:41.288,ns_1@10.242.238.90:<0.19609.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:49:41.290,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/749. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:41.290,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",749,pending,0} [rebalance:debug,2014-08-19T16:49:41.306,ns_1@10.242.238.90:<0.19611.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1004 [ns_server:info,2014-08-19T16:49:41.313,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 748 state to replica [ns_server:info,2014-08-19T16:49:41.320,ns_1@10.242.238.90:<0.19614.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 748 to state replica [ns_server:debug,2014-08-19T16:49:41.407,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1004. Nacking mccouch update. [views:debug,2014-08-19T16:49:41.407,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1004. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:41.407,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1004,replica,0} [ns_server:debug,2014-08-19T16:49:41.407,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,756,1019,759,1022,1006,762,1009,765,749,1012,752,1015, 755,1018,758,1021,1005,761,1008,764,1011,767,751,1014,754,1017,757,1020,1004, 760,1023,1007,763,1010] [ns_server:debug,2014-08-19T16:49:41.427,ns_1@10.242.238.90:<0.19614.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_748_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:41.428,ns_1@10.242.238.90:<0.19614.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[748]}, {checkpoints,[{748,0}]}, {name,<<"replication_building_748_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[748]}, {takeover,false}, {suffix,"building_748_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",748,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:41.429,ns_1@10.242.238.90:<0.19614.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19635.0> [rebalance:debug,2014-08-19T16:49:41.429,ns_1@10.242.238.90:<0.19614.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:41.429,ns_1@10.242.238.90:<0.19614.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27627.0>,#Ref<16550.0.1.29181>}]} [rebalance:info,2014-08-19T16:49:41.430,ns_1@10.242.238.90:<0.19614.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for 
vbucket 748 [rebalance:debug,2014-08-19T16:49:41.430,ns_1@10.242.238.90:<0.19614.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27627.0>,#Ref<16550.0.1.29181>}] [ns_server:debug,2014-08-19T16:49:41.431,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19636.0> (ok) [ns_server:debug,2014-08-19T16:49:41.432,ns_1@10.242.238.90:<0.19614.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:41.432,ns_1@10.242.238.90:<0.19637.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 748 [views:debug,2014-08-19T16:49:41.483,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1004. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:41.483,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1004,replica,0} [ns_server:info,2014-08-19T16:49:41.562,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1003 state to replica [ns_server:info,2014-08-19T16:49:41.566,ns_1@10.242.238.90:<0.19654.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1003 to state replica [ns_server:debug,2014-08-19T16:49:41.634,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 748. Nacking mccouch update. [views:debug,2014-08-19T16:49:41.634,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/748. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:41.634,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",748,pending,0} [ns_server:debug,2014-08-19T16:49:41.635,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,756,1019,759,1022,1006,762,1009,765,749,1012,752,1015, 755,1018,758,1021,1005,761,1008,764,748,1011,767,751,1014,754,1017,757,1020, 1004,760,1023,1007,763,1010] [ns_server:debug,2014-08-19T16:49:41.658,ns_1@10.242.238.90:<0.19654.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1003_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:41.659,ns_1@10.242.238.90:<0.19654.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1003]}, {checkpoints,[{1003,0}]}, {name,<<"replication_building_1003_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1003]}, {takeover,false}, {suffix,"building_1003_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1003,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:41.660,ns_1@10.242.238.90:<0.19654.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19655.0> [rebalance:debug,2014-08-19T16:49:41.660,ns_1@10.242.238.90:<0.19654.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:41.661,ns_1@10.242.238.90:<0.19654.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27702.0>,#Ref<16550.0.1.29576>}]} 
[rebalance:info,2014-08-19T16:49:41.661,ns_1@10.242.238.90:<0.19654.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1003 [rebalance:debug,2014-08-19T16:49:41.661,ns_1@10.242.238.90:<0.19654.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27702.0>,#Ref<16550.0.1.29576>}] [ns_server:debug,2014-08-19T16:49:41.663,ns_1@10.242.238.90:<0.19654.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:49:41.668,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/748. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:41.669,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",748,pending,0} [rebalance:debug,2014-08-19T16:49:41.669,ns_1@10.242.238.90:<0.19164.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:41.669,ns_1@10.242.238.90:<0.19102.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:41.669,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19164.0> (ok) [ns_server:debug,2014-08-19T16:49:41.669,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19102.0> (ok) [rebalance:debug,2014-08-19T16:49:41.680,ns_1@10.242.238.90:<0.19656.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1003 [ns_server:info,2014-08-19T16:49:41.686,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 747 state to replica [ns_server:info,2014-08-19T16:49:41.692,ns_1@10.242.238.90:<0.19659.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 747 to state replica [ns_server:debug,2014-08-19T16:49:41.799,ns_1@10.242.238.90:<0.19659.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_747_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:41.800,ns_1@10.242.238.90:<0.19659.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[747]}, {checkpoints,[{747,0}]}, {name,<<"replication_building_747_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[747]}, {takeover,false}, {suffix,"building_747_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",747,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:41.801,ns_1@10.242.238.90:<0.19659.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19674.0> [rebalance:debug,2014-08-19T16:49:41.801,ns_1@10.242.238.90:<0.19659.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:41.802,ns_1@10.242.238.90:<0.19659.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27722.0>,#Ref<16550.0.1.29697>}]} [rebalance:info,2014-08-19T16:49:41.802,ns_1@10.242.238.90:<0.19659.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 747 [rebalance:debug,2014-08-19T16:49:41.802,ns_1@10.242.238.90:<0.19659.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27722.0>,#Ref<16550.0.1.29697>}] [ns_server:debug,2014-08-19T16:49:41.803,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done 
message from subprocess: <0.19675.0> (ok) [ns_server:debug,2014-08-19T16:49:41.803,ns_1@10.242.238.90:<0.19659.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:41.804,ns_1@10.242.238.90:<0.19676.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 747 [ns_server:debug,2014-08-19T16:49:41.810,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1003. Nacking mccouch update. [views:debug,2014-08-19T16:49:41.810,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1003. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:41.811,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1003,replica,0} [ns_server:debug,2014-08-19T16:49:41.811,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,756,1019,1003,759,1022,1006,762,1009,765,749,1012,752, 1015,755,1018,758,1021,1005,761,1008,764,748,1011,767,751,1014,754,1017,757, 1020,1004,760,1023,1007,763,1010] [views:debug,2014-08-19T16:49:41.878,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1003. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:41.878,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1003,replica,0} [ns_server:info,2014-08-19T16:49:41.935,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1002 state to replica [ns_server:info,2014-08-19T16:49:41.939,ns_1@10.242.238.90:<0.19679.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1002 to state replica [ns_server:debug,2014-08-19T16:49:42.030,ns_1@10.242.238.90:<0.19679.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1002_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:42.031,ns_1@10.242.238.90:<0.19679.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1002]}, {checkpoints,[{1002,0}]}, {name,<<"replication_building_1002_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1002]}, {takeover,false}, {suffix,"building_1002_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1002,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:42.031,ns_1@10.242.238.90:<0.19679.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19680.0> [rebalance:debug,2014-08-19T16:49:42.032,ns_1@10.242.238.90:<0.19679.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:42.032,ns_1@10.242.238.90:<0.19679.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.27779.0>,#Ref<16550.0.1.29985>}]} [rebalance:info,2014-08-19T16:49:42.032,ns_1@10.242.238.90:<0.19679.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1002 [rebalance:debug,2014-08-19T16:49:42.032,ns_1@10.242.238.90:<0.19679.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.27779.0>,#Ref<16550.0.1.29985>}] 
[ns_server:debug,2014-08-19T16:49:42.033,ns_1@10.242.238.90:<0.19679.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:42.053,ns_1@10.242.238.90:<0.19695.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1002 [ns_server:debug,2014-08-19T16:49:42.084,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1002. Nacking mccouch update. [views:debug,2014-08-19T16:49:42.084,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1002. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:42.084,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1002,replica,0} [ns_server:debug,2014-08-19T16:49:42.084,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,756,1019,1003,759,1022,1006,762,1009,765,749,1012,752, 1015,755,1018,1002,758,1021,1005,761,1008,764,748,1011,767,751,1014,754,1017, 757,1020,1004,760,1023,1007,763,1010] [views:debug,2014-08-19T16:49:42.118,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1002. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:42.118,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1002,replica,0} [ns_server:debug,2014-08-19T16:49:42.202,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 747. Nacking mccouch update. [views:debug,2014-08-19T16:49:42.202,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/747. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:42.202,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",747,pending,0} [ns_server:debug,2014-08-19T16:49:42.202,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,756,1019,1003,759,1022,1006,762,1009,765,749,1012,752, 1015,755,1018,1002,758,1021,1005,761,1008,764,748,1011,767,751,1014,754,1017, 757,1020,1004,760,1023,1007,763,747,1010] [views:debug,2014-08-19T16:49:42.236,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/747. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:42.236,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",747,pending,0} [rebalance:debug,2014-08-19T16:49:42.237,ns_1@10.242.238.90:<0.19083.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:42.237,ns_1@10.242.238.90:<0.19037.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:42.237,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19083.0> (ok) [ns_server:debug,2014-08-19T16:49:42.237,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19037.0> (ok) [rebalance:debug,2014-08-19T16:49:42.345,ns_1@10.242.238.90:<0.19317.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:42.345,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19317.0> (ok) [rebalance:debug,2014-08-19T16:49:42.395,ns_1@10.242.238.90:<0.19232.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:42.395,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19232.0> (ok) [rebalance:debug,2014-08-19T16:49:42.446,ns_1@10.242.238.90:<0.19637.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:42.446,ns_1@10.242.238.90:<0.19141.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:42.446,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19637.0> (ok) [ns_server:debug,2014-08-19T16:49:42.446,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19141.0> (ok) [rebalance:debug,2014-08-19T16:49:42.571,ns_1@10.242.238.90:<0.19539.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:42.571,ns_1@10.242.238.90:<0.19063.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:42.571,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19539.0> (ok) [ns_server:debug,2014-08-19T16:49:42.571,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19063.0> (ok) [rebalance:debug,2014-08-19T16:49:42.680,ns_1@10.242.238.90:<0.19461.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:42.680,ns_1@10.242.238.90:<0.19145.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:42.680,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19461.0> (ok) [ns_server:debug,2014-08-19T16:49:42.680,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19145.0> (ok) [rebalance:debug,2014-08-19T16:49:42.755,ns_1@10.242.238.90:<0.19390.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:42.755,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19390.0> (ok) [rebalance:debug,2014-08-19T16:49:42.830,ns_1@10.242.238.90:<0.19298.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:42.830,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from 
subprocess: <0.19298.0> (ok) [rebalance:debug,2014-08-19T16:49:42.870,ns_1@10.242.238.90:<0.19223.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:42.870,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19223.0> (ok) [rebalance:debug,2014-08-19T16:49:42.920,ns_1@10.242.238.90:<0.19695.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:42.921,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19695.0> (ok) [rebalance:debug,2014-08-19T16:49:42.995,ns_1@10.242.238.90:<0.19611.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:42.996,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19611.0> (ok) [rebalance:debug,2014-08-19T16:49:42.996,ns_1@10.242.238.90:<0.19676.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:42.996,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19676.0> (ok) [rebalance:debug,2014-08-19T16:49:43.138,ns_1@10.242.238.90:<0.19578.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:43.138,ns_1@10.242.238.90:<0.19533.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:43.138,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19578.0> (ok) [ns_server:debug,2014-08-19T16:49:43.138,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19533.0> (ok) [rebalance:debug,2014-08-19T16:49:43.288,ns_1@10.242.238.90:<0.19441.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:43.288,ns_1@10.242.238.90:<0.19500.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:43.289,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19441.0> (ok) [ns_server:debug,2014-08-19T16:49:43.289,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19500.0> (ok) [rebalance:debug,2014-08-19T16:49:43.414,ns_1@10.242.238.90:<0.19422.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:43.414,ns_1@10.242.238.90:<0.19370.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:43.414,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19422.0> (ok) [ns_server:debug,2014-08-19T16:49:43.414,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19370.0> (ok) [rebalance:debug,2014-08-19T16:49:43.539,ns_1@10.242.238.90:<0.19268.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:43.539,ns_1@10.242.238.90:<0.19656.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:43.539,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19268.0> (ok) [ns_server:debug,2014-08-19T16:49:43.539,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19656.0> (ok) [rebalance:debug,2014-08-19T16:49:43.665,ns_1@10.242.238.90:<0.19558.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:49:43.665,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19558.0> (ok) [rebalance:debug,2014-08-19T16:49:43.748,ns_1@10.242.238.90:<0.19480.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:43.748,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19480.0> (ok) [rebalance:debug,2014-08-19T16:49:43.798,ns_1@10.242.238.90:<0.19415.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:43.798,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19415.0> (ok) [ns_server:debug,2014-08-19T16:49:44.036,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:44.040,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3563 us [ns_server:debug,2014-08-19T16:49:44.041,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:44.042,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:44.043,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{511, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:45.121,ns_1@10.242.238.90:<0.19732.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 767) [ns_server:debug,2014-08-19T16:49:45.121,ns_1@10.242.238.90:<0.19732.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:45.121,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19731.0> (ok) [rebalance:debug,2014-08-19T16:49:45.122,ns_1@10.242.238.90:<0.18847.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:45.122,ns_1@10.242.238.90:<0.18847.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:45.122,ns_1@10.242.238.90:<0.19733.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:45.123,ns_1@10.242.238.90:<0.19733.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:45.123,ns_1@10.242.238.90:<0.18847.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:49:45.180,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 767 state to active [ns_server:debug,2014-08-19T16:49:45.199,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:45.203,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:45.203,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2231 us [ns_server:debug,2014-08-19T16:49:45.204,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{767, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:45.205,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:49:45.230,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/767. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:45.230,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",767,active,1} [rebalance:debug,2014-08-19T16:49:45.250,ns_1@10.242.238.90:<0.19735.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 760 [rebalance:debug,2014-08-19T16:49:45.251,ns_1@10.242.238.90:<0.19735.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:45.251,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19735.0> (ok) [rebalance:debug,2014-08-19T16:49:45.255,ns_1@10.242.238.90:<0.18828.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:45.256,ns_1@10.242.238.90:<0.18828.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:45.256,ns_1@10.242.238.90:<0.19738.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:45.256,ns_1@10.242.238.90:<0.19738.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:45.256,ns_1@10.242.238.90:<0.18828.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:49:45.259,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1023 state to replica [ns_server:info,2014-08-19T16:49:45.260,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:start_child:172]Starting replication from 'ns_1@10.242.238.91' for [1023] [error_logger:info,2014-08-19T16:49:45.262,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.19739.0>}, {name,{new_child_id,[1023],'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:45.263,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1023 to state replica [ns_server:debug,2014-08-19T16:49:45.271,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:45.272,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:45.275,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1023]}, {checkpoints,[{1023,1}]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] {{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]} [rebalance:debug,2014-08-19T16:49:45.276,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19740.0> [rebalance:info,2014-08-19T16:49:45.279,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:49:45.281,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 9658 us [ns_server:debug,2014-08-19T16:49:45.281,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:45.282,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1023, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:45.282,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:45.466,ns_1@10.242.238.90:<0.19749.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 762 
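(Editor's note, not part of the captured log: the block above shows one replica being rebuilt end to end: ns_memcached changes vbucket 1023 to replica, the 'ns_vbm_new_sup-default' supervisor starts an ebucketmigrator_srv child pulling [1023] from 10.242.238.91 without takeover, and the bucket map is updated. When tracing many such transitions, a small illustrative helper like the one below can rebuild a per-vbucket state timeline; the two regexes match only the message shapes quoted in the comments, and the rest is my own scaffolding. It assumes one entry per line, e.g. after splitting with the sketch earlier in this file.)

import re
from collections import defaultdict

# Message shapes observed in this trace (not an exhaustive list):
#   "Changed vbucket 1023 state to replica"                               (ns_memcached)
#   "Got set_vbucket event for default/1023. Updated state: replica (0)"  (capi_set_view_manager)
CHANGED_RE = re.compile(r"Changed vbucket (\d+) state to (\w+)")
EVENT_RE = re.compile(r"set_vbucket event for [^/]+/(\d+)\. Updated state: (\w+)")
TS_RE = re.compile(r"\d{4}-\d{2}-\d{2}T[\d:.]+")

def vbucket_timeline(lines):
    """Map vbucket id -> list of (timestamp, state) in the order logged."""
    timeline = defaultdict(list)
    for line in lines:
        ts = TS_RE.search(line)
        for rx in (CHANGED_RE, EVENT_RE):
            for vb, state in rx.findall(line):
                timeline[int(vb)].append((ts.group(0) if ts else None, state))
    return timeline

Run over this section it would show, for example, vbucket 1023 going to replica and vbuckets 767, 766, 760 and 762 later going to active as the rebalance hands them over.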
[rebalance:debug,2014-08-19T16:49:45.467,ns_1@10.242.238.90:<0.19749.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:45.467,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19749.0> (ok) [rebalance:debug,2014-08-19T16:49:45.616,ns_1@10.242.238.90:<0.19752.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 764 [rebalance:debug,2014-08-19T16:49:45.617,ns_1@10.242.238.90:<0.19752.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:45.617,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19752.0> (ok) [ns_server:debug,2014-08-19T16:49:45.739,ns_1@10.242.238.90:<0.19756.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 766) [ns_server:debug,2014-08-19T16:49:45.739,ns_1@10.242.238.90:<0.19756.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:45.739,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19755.0> (ok) [rebalance:debug,2014-08-19T16:49:45.740,ns_1@10.242.238.90:<0.18886.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:45.740,ns_1@10.242.238.90:<0.18886.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:45.740,ns_1@10.242.238.90:<0.19757.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:45.740,ns_1@10.242.238.90:<0.19757.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:45.740,ns_1@10.242.238.90:<0.18886.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:49:45.795,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 766 state to active [rebalance:debug,2014-08-19T16:49:45.817,ns_1@10.242.238.90:<0.19758.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1016 [ns_server:debug,2014-08-19T16:49:45.818,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:45.822,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:45.822,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3522 us [ns_server:debug,2014-08-19T16:49:45.823,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:45.824,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{766, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:49:45.847,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/766. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:45.847,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",766,active,1} [rebalance:debug,2014-08-19T16:49:45.848,ns_1@10.242.238.90:<0.19758.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:45.848,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19758.0> (ok) [rebalance:debug,2014-08-19T16:49:45.918,ns_1@10.242.238.90:<0.19762.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1018 [rebalance:debug,2014-08-19T16:49:45.919,ns_1@10.242.238.90:<0.19762.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:45.919,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19762.0> (ok) [rebalance:debug,2014-08-19T16:49:46.055,ns_1@10.242.238.90:<0.19765.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1020 [rebalance:debug,2014-08-19T16:49:46.056,ns_1@10.242.238.90:<0.19765.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:46.056,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19765.0> (ok) [rebalance:debug,2014-08-19T16:49:46.171,ns_1@10.242.238.90:<0.19768.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1022 [rebalance:debug,2014-08-19T16:49:46.172,ns_1@10.242.238.90:<0.19768.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:46.173,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done 
message from subprocess: <0.19768.0> (ok) [rebalance:debug,2014-08-19T16:49:46.345,ns_1@10.242.238.90:<0.18867.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:46.346,ns_1@10.242.238.90:<0.18867.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:46.346,ns_1@10.242.238.90:<0.19771.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:46.346,ns_1@10.242.238.90:<0.19771.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:46.346,ns_1@10.242.238.90:<0.18867.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:49:46.351,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1022 state to replica [ns_server:info,2014-08-19T16:49:46.351,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1022,1023] ([1022], []) [ns_server:debug,2014-08-19T16:49:46.352,ns_1@10.242.238.90:<0.19772.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,[1022,1023],'ns_1@10.242.238.91'}, #Ref<0.0.0.216691>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:46.353,ns_1@10.242.238.90:<0.19772.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.19739.0> [ns_server:info,2014-08-19T16:49:46.353,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:46.361,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1022,1},{1023,1}] [ns_server:info,2014-08-19T16:49:46.361,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:46.361,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:46.362,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:49:46.362,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:46.362,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:46.362,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:46.362,ns_1@10.242.238.90:<0.19774.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:46.362,ns_1@10.242.238.90:<0.19774.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:46.362,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:46.362,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:46.362,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:46.363,ns_1@10.242.238.90:<0.19739.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:46.363,ns_1@10.242.238.90:<0.19772.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.19739.0> [ns_server:debug,2014-08-19T16:49:46.363,ns_1@10.242.238.90:<0.19772.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:46.363,ns_1@10.242.238.90:<0.19776.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:46.363,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.19739.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.19740.0>,<<"cut off">>,<<"cut off">>,[],7,false,false,0, {1408,452586,361941}, completed, {<0.19772.0>,#Ref<0.0.0.216704>}, <<"replication_ns_1@10.242.238.90">>,<0.19739.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:49:46.363,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.19772.0>,{#Ref<0.0.0.216693>,<0.19776.0>}} [error_logger:info,2014-08-19T16:49:46.363,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.19776.0>}, {name,{new_child_id,[1022,1023],'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:46.367,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:46.367,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19777.0> [ns_server:debug,2014-08-19T16:49:46.372,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:46.376,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:46.376,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4198 us [ns_server:debug,2014-08-19T16:49:46.377,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1022, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:46.378,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:46.490,ns_1@10.242.238.90:<0.19779.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 747 [rebalance:debug,2014-08-19T16:49:46.492,ns_1@10.242.238.90:<0.19779.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:46.492,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19779.0> (ok) [rebalance:debug,2014-08-19T16:49:46.591,ns_1@10.242.238.90:<0.19782.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 749 [rebalance:debug,2014-08-19T16:49:46.592,ns_1@10.242.238.90:<0.19782.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:46.592,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19782.0> (ok) [rebalance:debug,2014-08-19T16:49:46.691,ns_1@10.242.238.90:<0.19785.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 
1 in vbucket 751 [rebalance:debug,2014-08-19T16:49:46.692,ns_1@10.242.238.90:<0.19785.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:46.693,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19785.0> (ok) [rebalance:debug,2014-08-19T16:49:46.791,ns_1@10.242.238.90:<0.19788.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 753 [rebalance:debug,2014-08-19T16:49:46.793,ns_1@10.242.238.90:<0.19788.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:46.793,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19788.0> (ok) [rebalance:debug,2014-08-19T16:49:46.901,ns_1@10.242.238.90:<0.19791.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 755 [rebalance:debug,2014-08-19T16:49:46.903,ns_1@10.242.238.90:<0.19791.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:46.903,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19791.0> (ok) [rebalance:debug,2014-08-19T16:49:47.035,ns_1@10.242.238.90:<0.19794.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 757 [rebalance:debug,2014-08-19T16:49:47.037,ns_1@10.242.238.90:<0.19794.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.037,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19794.0> (ok) [rebalance:debug,2014-08-19T16:49:47.112,ns_1@10.242.238.90:<0.19797.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 759 [rebalance:debug,2014-08-19T16:49:47.114,ns_1@10.242.238.90:<0.19797.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.114,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19797.0> (ok) [ns_server:debug,2014-08-19T16:49:47.116,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:47.119,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.120,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4403 us [ns_server:debug,2014-08-19T16:49:47.121,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.121,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{504, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:47.180,ns_1@10.242.238.90:<0.19801.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 761 
[rebalance:debug,2014-08-19T16:49:47.182,ns_1@10.242.238.90:<0.19801.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.182,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19801.0> (ok) [ns_server:debug,2014-08-19T16:49:47.218,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.218,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:47.218,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 23 us [ns_server:debug,2014-08-19T16:49:47.219,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.219,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{506, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:47.227,ns_1@10.242.238.90:<0.19805.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 763 [rebalance:debug,2014-08-19T16:49:47.228,ns_1@10.242.238.90:<0.19805.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.228,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19805.0> (ok) [ns_server:debug,2014-08-19T16:49:47.269,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:49:47.270,ns_1@10.242.238.90:<0.19808.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 748 [rebalance:debug,2014-08-19T16:49:47.270,ns_1@10.242.238.90:<0.19811.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 765 [rebalance:debug,2014-08-19T16:49:47.271,ns_1@10.242.238.90:<0.19808.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.271,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19808.0> (ok) [ns_server:debug,2014-08-19T16:49:47.273,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.273,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3144 us [rebalance:debug,2014-08-19T16:49:47.273,ns_1@10.242.238.90:<0.19811.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.274,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19811.0> (ok) [ns_server:debug,2014-08-19T16:49:47.274,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> 
[{configs,[{"default", [{map,[{508, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:47.274,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.316,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:47.324,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.325,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8106 us [ns_server:debug,2014-08-19T16:49:47.325,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.326,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{510, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:47.353,ns_1@10.242.238.90:<0.19816.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 750 [rebalance:debug,2014-08-19T16:49:47.354,ns_1@10.242.238.90:<0.19819.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1003 [rebalance:debug,2014-08-19T16:49:47.354,ns_1@10.242.238.90:<0.19816.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.354,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19816.0> (ok) [rebalance:debug,2014-08-19T16:49:47.355,ns_1@10.242.238.90:<0.19819.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.355,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19819.0> (ok) [rebalance:debug,2014-08-19T16:49:47.445,ns_1@10.242.238.90:<0.19828.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 752 [rebalance:debug,2014-08-19T16:49:47.445,ns_1@10.242.238.90:<0.19831.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1005 [rebalance:debug,2014-08-19T16:49:47.446,ns_1@10.242.238.90:<0.19828.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.446,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19828.0> (ok) 
[rebalance:debug,2014-08-19T16:49:47.447,ns_1@10.242.238.90:<0.19831.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.447,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19831.0> (ok) [rebalance:debug,2014-08-19T16:49:47.564,ns_1@10.242.238.90:<0.19834.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 754 [rebalance:debug,2014-08-19T16:49:47.564,ns_1@10.242.238.90:<0.19837.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1007 [rebalance:debug,2014-08-19T16:49:47.565,ns_1@10.242.238.90:<0.19834.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.565,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19834.0> (ok) [rebalance:debug,2014-08-19T16:49:47.565,ns_1@10.242.238.90:<0.19837.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.565,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19837.0> (ok) [rebalance:debug,2014-08-19T16:49:47.681,ns_1@10.242.238.90:<0.19840.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1009 [rebalance:debug,2014-08-19T16:49:47.681,ns_1@10.242.238.90:<0.19843.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 756 [rebalance:debug,2014-08-19T16:49:47.682,ns_1@10.242.238.90:<0.19843.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.682,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19843.0> (ok) [rebalance:debug,2014-08-19T16:49:47.682,ns_1@10.242.238.90:<0.19840.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.682,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19840.0> (ok) [rebalance:debug,2014-08-19T16:49:47.793,ns_1@10.242.238.90:<0.19847.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 758 [rebalance:debug,2014-08-19T16:49:47.793,ns_1@10.242.238.90:<0.19850.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1011 [rebalance:debug,2014-08-19T16:49:47.794,ns_1@10.242.238.90:<0.19847.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.794,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19847.0> (ok) [rebalance:debug,2014-08-19T16:49:47.794,ns_1@10.242.238.90:<0.19850.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.794,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19850.0> (ok) [ns_server:debug,2014-08-19T16:49:47.889,ns_1@10.242.238.90:<0.19854.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 760) [ns_server:debug,2014-08-19T16:49:47.889,ns_1@10.242.238.90:<0.19854.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:47.889,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19853.0> (ok) [rebalance:debug,2014-08-19T16:49:47.890,ns_1@10.242.238.90:<0.19105.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
[ns_server:debug,2014-08-19T16:49:47.890,ns_1@10.242.238.90:<0.19105.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:47.890,ns_1@10.242.238.90:<0.19855.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:47.890,ns_1@10.242.238.90:<0.19855.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:47.890,ns_1@10.242.238.90:<0.19105.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:49:47.891,ns_1@10.242.238.90:<0.19856.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1013 [rebalance:debug,2014-08-19T16:49:47.892,ns_1@10.242.238.90:<0.19856.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.893,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19856.0> (ok) [ns_server:debug,2014-08-19T16:49:47.940,ns_1@10.242.238.90:<0.19860.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 762) [ns_server:debug,2014-08-19T16:49:47.940,ns_1@10.242.238.90:<0.19860.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:47.940,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19859.0> (ok) [rebalance:debug,2014-08-19T16:49:47.940,ns_1@10.242.238.90:<0.19040.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:47.940,ns_1@10.242.238.90:<0.19040.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:47.941,ns_1@10.242.238.90:<0.19861.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:47.941,ns_1@10.242.238.90:<0.19861.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:47.941,ns_1@10.242.238.90:<0.19040.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:49:47.942,ns_1@10.242.238.90:<0.19862.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1015 [rebalance:debug,2014-08-19T16:49:47.943,ns_1@10.242.238.90:<0.19862.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:47.943,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19862.0> (ok) [ns_server:info,2014-08-19T16:49:47.946,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 760 state to active [ns_server:debug,2014-08-19T16:49:47.971,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:47.975,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3624 us [ns_server:debug,2014-08-19T16:49:47.975,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.975,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.976,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{760, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:47.991,ns_1@10.242.238.90:<0.19867.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 764) [ns_server:debug,2014-08-19T16:49:47.991,ns_1@10.242.238.90:<0.19867.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:47.991,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19866.0> (ok) [rebalance:debug,2014-08-19T16:49:47.991,ns_1@10.242.238.90:<0.18956.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:47.992,ns_1@10.242.238.90:<0.18956.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:47.992,ns_1@10.242.238.90:<0.19868.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:47.992,ns_1@10.242.238.90:<0.19868.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:49:47.992,ns_1@10.242.238.90:<0.19869.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1017 [rebalance:info,2014-08-19T16:49:47.992,ns_1@10.242.238.90:<0.18956.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:49:47.996,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 762 state to active [ns_server:debug,2014-08-19T16:49:48.014,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:49:48.014,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/760. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:48.014,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",760,active,1} [rebalance:debug,2014-08-19T16:49:48.015,ns_1@10.242.238.90:<0.19869.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:48.015,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19869.0> (ok) [ns_server:debug,2014-08-19T16:49:48.017,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.017,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3226 us [ns_server:debug,2014-08-19T16:49:48.017,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.018,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{762, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:48.032,ns_1@10.242.238.90:<0.19874.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1019 [rebalance:debug,2014-08-19T16:49:48.032,ns_1@10.242.238.90:<0.19873.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1002 [ns_server:info,2014-08-19T16:49:48.049,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 764 state to active [ns_server:debug,2014-08-19T16:49:48.071,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.075,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.075,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3871 us [ns_server:debug,2014-08-19T16:49:48.075,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.076,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{764, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, 
{sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:49:48.097,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/762. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:48.098,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",762,active,1} [rebalance:debug,2014-08-19T16:49:48.098,ns_1@10.242.238.90:<0.19873.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:48.098,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19873.0> (ok) [rebalance:debug,2014-08-19T16:49:48.154,ns_1@10.242.238.90:<0.19880.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1021 [rebalance:debug,2014-08-19T16:49:48.154,ns_1@10.242.238.90:<0.19883.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1004 [views:debug,2014-08-19T16:49:48.172,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/764. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:48.172,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",764,active,1} [rebalance:debug,2014-08-19T16:49:48.173,ns_1@10.242.238.90:<0.19874.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:48.173,ns_1@10.242.238.90:<0.19883.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:48.173,ns_1@10.242.238.90:<0.19880.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:48.173,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19874.0> (ok) [ns_server:debug,2014-08-19T16:49:48.173,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19883.0> (ok) [ns_server:debug,2014-08-19T16:49:48.173,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19880.0> (ok) [rebalance:debug,2014-08-19T16:49:48.279,ns_1@10.242.238.90:<0.19886.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1006 [rebalance:debug,2014-08-19T16:49:48.280,ns_1@10.242.238.90:<0.19886.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:48.280,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19886.0> (ok) [rebalance:debug,2014-08-19T16:49:48.328,ns_1@10.242.238.90:<0.19889.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1008 [rebalance:debug,2014-08-19T16:49:48.330,ns_1@10.242.238.90:<0.19889.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:48.330,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19889.0> (ok) [ns_server:debug,2014-08-19T16:49:48.357,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 
'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.360,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.360,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3305 us [ns_server:debug,2014-08-19T16:49:48.361,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{491, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.362,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:48.381,ns_1@10.242.238.90:<0.19901.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1010 [rebalance:debug,2014-08-19T16:49:48.382,ns_1@10.242.238.90:<0.19901.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:48.382,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19901.0> (ok) [ns_server:debug,2014-08-19T16:49:48.403,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.408,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.409,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1883 us [ns_server:debug,2014-08-19T16:49:48.409,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{493, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.409,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:48.437,ns_1@10.242.238.90:<0.19908.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1012 [rebalance:debug,2014-08-19T16:49:48.438,ns_1@10.242.238.90:<0.19908.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:48.438,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19908.0> (ok) [ns_server:debug,2014-08-19T16:49:48.454,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:49:48.461,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.461,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5097 us [ns_server:debug,2014-08-19T16:49:48.461,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.462,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{495, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:48.470,ns_1@10.242.238.90:<0.19912.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1014 [rebalance:debug,2014-08-19T16:49:48.472,ns_1@10.242.238.90:<0.19912.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:48.472,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19912.0> (ok) [ns_server:debug,2014-08-19T16:49:48.514,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.517,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.518,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4097 us [ns_server:debug,2014-08-19T16:49:48.518,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{497, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.519,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.529,ns_1@10.242.238.90:<0.19917.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 747) [ns_server:debug,2014-08-19T16:49:48.529,ns_1@10.242.238.90:<0.19917.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.529,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19916.0> (ok) [ns_server:debug,2014-08-19T16:49:48.530,ns_1@10.242.238.90:<0.19919.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 751) 
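ns_config_rep reports how long each full synchronization requested by 'ns_1@10.242.238.88' took ("Fully synchronized config in 3624 us", "... 5097 us", and so on). A small helper to pull those durations out of parsed message bodies and summarize them; SYNC_RE and sync_times_us are illustrative names, not part of the server:

import re
import statistics

SYNC_RE = re.compile(r"Fully synchronized config in (\d+) us")

def sync_times_us(messages):
    """Collect full-synchronization durations (microseconds) from message bodies."""
    times = []
    for msg in messages:
        m = SYNC_RE.search(msg)
        if m:
            times.append(int(m.group(1)))
    return times

# Durations taken from the entries above:
samples = sync_times_us([
    "Fully synchronized config in 3624 us",
    "Fully synchronized config in 3226 us",
    "Fully synchronized config in 5097 us",
    "Fully synchronized config in 4097 us",
])
print(max(samples), round(statistics.mean(samples)))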
[ns_server:debug,2014-08-19T16:49:48.530,ns_1@10.242.238.90:<0.19919.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.530,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19918.0> (ok) [rebalance:debug,2014-08-19T16:49:48.531,ns_1@10.242.238.90:<0.19659.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.531,ns_1@10.242.238.90:<0.19922.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 763) [ns_server:debug,2014-08-19T16:49:48.531,ns_1@10.242.238.90:<0.19922.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:49:48.531,ns_1@10.242.238.90:<0.19483.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.531,ns_1@10.242.238.90:<0.19659.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.531,ns_1@10.242.238.90:<0.19927.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.531,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19920.0> (ok) [ns_server:debug,2014-08-19T16:49:48.531,ns_1@10.242.238.90:<0.19928.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 749) [ns_server:debug,2014-08-19T16:49:48.531,ns_1@10.242.238.90:<0.19483.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.531,ns_1@10.242.238.90:<0.19929.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.531,ns_1@10.242.238.90:<0.19927.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.531,ns_1@10.242.238.90:<0.19929.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19928.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19932.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 753) [rebalance:info,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19483.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19921.0> (ok) [rebalance:info,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19659.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19932.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19937.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 757) [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19923.0> (ok) [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19937.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19940.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 755) [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19924.0> (ok) [rebalance:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19001.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19941.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 759) [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19940.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19561.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19941.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19942.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 765) [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19001.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:<0.19942.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19944.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 748) [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19943.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19926.0> (ok) [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19944.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19561.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19945.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19925.0> (ok) [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19943.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19946.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] 
("default", 752) [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19945.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19930.0> (ok) [rebalance:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19419.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19946.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19947.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 754) [rebalance:info,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19001.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19931.0> (ok) [rebalance:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19246.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19561.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19419.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19947.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19949.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 761) [rebalance:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19144.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19948.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19933.0> (ok) [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19949.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19246.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19948.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19950.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19144.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:<0.19951.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 750) [rebalance:info,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19419.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19935.0> (ok) [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19951.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19950.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19952.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19614.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19934.0> (ok) [rebalance:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19320.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19953.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 756) [rebalance:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.18911.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19952.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19614.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19954.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19246.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19144.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19936.0> (ok) [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19953.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19954.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19320.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19955.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 758) [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19938.0> (ok) [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19956.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19957.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.18911.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19955.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19956.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19957.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19444.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19066.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19373.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19614.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:48.534,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.19939.0> (ok) [rebalance:info,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.19320.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:49:48.534,ns_1@10.242.238.90:<0.18911.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19444.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19066.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19960.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19373.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19958.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19959.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19536.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19960.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19958.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19959.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19271.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19536.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19373.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19444.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19066.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19271.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19961.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19962.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19206.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19961.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19962.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19206.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19963.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:49:48.535,ns_1@10.242.238.90:<0.19271.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:48.536,ns_1@10.242.238.90:<0.19963.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:48.536,ns_1@10.242.238.90:<0.19536.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:49:48.536,ns_1@10.242.238.90:<0.19206.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
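The burst above interleaves shutdowns of many ebucketmigrator processes; per pid the visible order is "Dying with reason: shutdown", then "Going to wait for reception of opaque message ack", with a separate pid sending the opaque confirmation, and finally "Got close ack!". A sketch for checking that every migrator that started dying also reached its close ack, assuming entries parsed into dicts with 'proc' and 'message' fields as in the earlier parsing sketch:

def unacked_shutdowns(entries):
    """Return pids that logged 'Dying with reason: shutdown' but never 'Got close ack!'.

    entries: dicts with at least 'proc' and 'message' keys, as produced by
    the parse_entry sketch earlier in this log.
    """
    dying, acked = set(), set()
    for e in entries:
        if "Dying with reason: shutdown" in e["message"]:
            dying.add(e["proc"])
        elif "Got close ack!" in e["message"]:
            acked.add(e["proc"])
    return sorted(dying - acked)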
[ns_server:debug,2014-08-19T16:49:48.646,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.650,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.650,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3688 us [ns_server:debug,2014-08-19T16:49:48.650,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.651,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{499, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.680,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.684,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.685,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4219 us [ns_server:debug,2014-08-19T16:49:48.686,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{503, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.686,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.704,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.709,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2908 us [ns_server:debug,2014-08-19T16:49:48.709,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.710,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:48.710,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 751 state to active [ns_server:debug,2014-08-19T16:49:48.710,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> 
[{configs,[{"default", [{map,[{501, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.734,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.737,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.737,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3246 us [ns_server:debug,2014-08-19T16:49:48.738,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.739,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{509, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:49:48.748,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/751. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:49:48.748,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",751,active,1} [ns_server:debug,2014-08-19T16:49:48.756,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.764,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8084 us [ns_server:debug,2014-08-19T16:49:48.764,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.765,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.765,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{494, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.790,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.794,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3691 us [ns_server:debug,2014-08-19T16:49:48.794,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.794,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.795,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{751, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.808,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.810,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2570 us [ns_server:info,2014-08-19T16:49:48.811,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 750 state to active [ns_server:debug,2014-08-19T16:49:48.811,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:49:48.812,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{505, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.812,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:48.823,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 753 state to active [ns_server:debug,2014-08-19T16:49:48.826,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.830,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.831,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5159 us [ns_server:debug,2014-08-19T16:49:48.832,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{498, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.834,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:48.841,ns_1@10.242.238.90:<0.19354.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.841,ns_1@10.242.238.90:<0.19354.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.841,ns_1@10.242.238.90:<0.19970.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.842,ns_1@10.242.238.90:<0.19970.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:48.842,ns_1@10.242.238.90:<0.19354.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:48.844,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.848,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3378 us [ns_server:debug,2014-08-19T16:49:48.848,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.848,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.849,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{496, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:49:48.865,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/750. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:48.865,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",750,active,1} [rebalance:debug,2014-08-19T16:49:48.866,ns_1@10.242.238.90:<0.18982.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.866,ns_1@10.242.238.90:<0.18982.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.866,ns_1@10.242.238.90:<0.19972.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.867,ns_1@10.242.238.90:<0.19972.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:48.867,ns_1@10.242.238.90:<0.18982.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:48.871,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.872,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.872,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1419 us [ns_server:debug,2014-08-19T16:49:48.873,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.873,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{492, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:48.884,ns_1@10.242.238.90:<0.19227.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.885,ns_1@10.242.238.90:<0.19227.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.885,ns_1@10.242.238.90:<0.19973.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.885,ns_1@10.242.238.90:<0.19973.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:48.885,ns_1@10.242.238.90:<0.19227.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
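Each vbucket takeover shows up twice in these entries: ns_memcached reports "Changed vbucket N state to active", and mc_couch_events later reports the matching "Got set_vbucket event for default/N". A sketch that pairs the two per vbucket and measures the gap between them; the field names follow the earlier parsing sketch and the string handling assumes the exact message wording shown in this log:

from datetime import datetime

def _ts(value):
    # e.g. "2014-08-19T16:49:47.946"
    return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f")

def activation_lag(entries):
    """Seconds between 'Changed vbucket N state to active' and the matching
    'Got set_vbucket event for default/N' entry, keyed by vbucket id."""
    changed, lags = {}, {}
    for e in entries:
        msg = e["message"]
        if msg.startswith("Changed vbucket") and "state to active" in msg:
            changed[int(msg.split()[2])] = _ts(e["ts"])
        elif "Got set_vbucket event for default/" in msg:
            vb = int(msg.split("default/")[1].split(".")[0])
            if vb in changed:
                lags[vb] = (_ts(e["ts"]) - changed[vb]).total_seconds()
    return lags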
[ns_server:debug,2014-08-19T16:49:48.891,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.897,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.897,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5405 us [ns_server:debug,2014-08-19T16:49:48.898,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.898,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{507, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:48.912,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 763 state to active [views:debug,2014-08-19T16:49:48.915,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/753. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:48.915,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",753,active,1} [ns_server:debug,2014-08-19T16:49:48.917,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.919,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.919,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1321 us [ns_server:debug,2014-08-19T16:49:48.919,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.920,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{750, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:48.956,ns_1@10.242.238.90:<0.19517.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.956,ns_1@10.242.238.90:<0.19517.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.956,ns_1@10.242.238.90:<0.19976.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque 
message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.956,ns_1@10.242.238.90:<0.19976.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:48.956,ns_1@10.242.238.90:<0.19517.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:48.959,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:48.962,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2836 us [ns_server:debug,2014-08-19T16:49:48.962,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.963,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.963,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{753, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:48.967,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1010 state to replica [ns_server:info,2014-08-19T16:49:48.967,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1010,1022,1023] ([1010], []) [ns_server:debug,2014-08-19T16:49:48.968,ns_1@10.242.238.90:<0.19978.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1010,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.218935>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1010,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:48.969,ns_1@10.242.238.90:<0.19978.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.19776.0> [ns_server:info,2014-08-19T16:49:48.969,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:49:48.970,ns_1@10.242.238.90:<0.19430.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.970,ns_1@10.242.238.90:<0.19430.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.970,ns_1@10.242.238.90:<0.19980.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.970,ns_1@10.242.238.90:<0.19980.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent 
[rebalance:info,2014-08-19T16:49:48.970,ns_1@10.242.238.90:<0.19430.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:49:48.976,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/763. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:48.976,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",763,active,1} [ns_server:info,2014-08-19T16:49:48.978,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1010,1},{1022,1},{1023,1}] [ns_server:info,2014-08-19T16:49:48.978,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:48.979,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:48.979,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:48.979,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:48.979,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:48.979,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.979,ns_1@10.242.238.90:<0.19981.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.979,ns_1@10.242.238.90:<0.19981.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:48.979,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:48.980,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:48.980,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:48.980,ns_1@10.242.238.90:<0.19776.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:48.980,ns_1@10.242.238.90:<0.19978.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.19776.0> [ns_server:debug,2014-08-19T16:49:48.980,ns_1@10.242.238.90:<0.19978.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:48.980,ns_1@10.242.238.90:<0.19983.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:48.980,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.19776.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.19777.0>,<<"cut off">>,<<"cut off">>,[],10,false,false,0, {1408,452588,979111}, completed, {<0.19978.0>,#Ref<0.0.0.218949>}, <<"replication_ns_1@10.242.238.90">>,<0.19776.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:49:48.981,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.19978.0>,{#Ref<0.0.0.218937>,<0.19983.0>}} [error_logger:info,2014-08-19T16:49:48.981,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.19983.0>}, {name, {new_child_id, [1010,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1010,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:48.984,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1010,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:48.984,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19984.0> [rebalance:debug,2014-08-19T16:49:48.988,ns_1@10.242.238.90:<0.19086.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.989,ns_1@10.242.238.90:<0.19086.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:48.989,ns_1@10.242.238.90:<0.19985.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:48.989,ns_1@10.242.238.90:<0.19985.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:49:48.989,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:info,2014-08-19T16:49:48.989,ns_1@10.242.238.90:<0.19086.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:48.993,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.993,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4478 us [ns_server:debug,2014-08-19T16:49:48.994,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.994,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1010, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:48.997,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1019 state to replica [ns_server:info,2014-08-19T16:49:48.997,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1010,1019,1022,1023] ([1019], []) [ns_server:debug,2014-08-19T16:49:49.001,ns_1@10.242.238.90:<0.19987.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1010,1019,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.219123>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1010,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.001,ns_1@10.242.238.90:<0.19987.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.19983.0> [ns_server:info,2014-08-19T16:49:49.001,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.009,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1010,1},{1019,1},{1022,1},{1023,1}] [ns_server:info,2014-08-19T16:49:49.010,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.010,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.010,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:49:49.010,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.010,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.010,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.010,ns_1@10.242.238.90:<0.19989.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.010,ns_1@10.242.238.90:<0.19989.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.010,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.011,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.011,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.011,ns_1@10.242.238.90:<0.19983.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.011,ns_1@10.242.238.90:<0.19987.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.19983.0> [ns_server:debug,2014-08-19T16:49:49.011,ns_1@10.242.238.90:<0.19987.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.011,ns_1@10.242.238.90:<0.19991.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.011,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.19983.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.19984.0>,<<"cut off">>,<<"cut off">>,[],13,false,false,0, {1408,452589,10260}, completed, {<0.19987.0>,#Ref<0.0.0.219138>}, <<"replication_ns_1@10.242.238.90">>,<0.19983.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:49:49.012,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.19987.0>,{#Ref<0.0.0.219125>,<0.19991.0>}} [error_logger:info,2014-08-19T16:49:49.012,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.19991.0>}, {name, {new_child_id, [1010,1019,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1010,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:49.012,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 759 state to active [ns_server:debug,2014-08-19T16:49:49.016,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.016,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1010,1019,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.016,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.19992.0> [ns_server:debug,2014-08-19T16:49:49.023,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6861 us [ns_server:debug,2014-08-19T16:49:49.026,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.026,ns_1@10.242.238.90:<0.19125.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:49.026,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1013 state to replica [ns_server:debug,2014-08-19T16:49:49.027,ns_1@10.242.238.90:<0.19125.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.027,ns_1@10.242.238.90:<0.19993.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:info,2014-08-19T16:49:49.027,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1010,1013,1019,1022,1023] ([1013], []) [ns_server:debug,2014-08-19T16:49:49.027,ns_1@10.242.238.90:<0.19993.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.028,ns_1@10.242.238.90:<0.19125.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:49.028,ns_1@10.242.238.90:<0.19994.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1010,1013,1019,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.219282>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1010,1013,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.028,ns_1@10.242.238.90:<0.19994.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.19991.0> [ns_server:info,2014-08-19T16:49:49.028,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:debug,2014-08-19T16:49:49.028,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.029,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1019, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.037,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1010,1},{1013,1},{1019,1},{1022,1},{1023,1}] [ns_server:info,2014-08-19T16:49:49.038,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.038,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.038,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.038,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.039,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.039,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.039,ns_1@10.242.238.90:<0.19997.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.039,ns_1@10.242.238.90:<0.19997.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.039,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:49.039,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.039,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.039,ns_1@10.242.238.90:<0.19991.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.039,ns_1@10.242.238.90:<0.19994.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.19991.0> [ns_server:debug,2014-08-19T16:49:49.040,ns_1@10.242.238.90:<0.19994.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.040,ns_1@10.242.238.90:<0.19999.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.040,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.19991.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.19992.0>,<<"cut off">>,<<"cut off">>,[],16,false,false,0, {1408,452589,38601}, completed, {<0.19994.0>,#Ref<0.0.0.219300>}, <<"replication_ns_1@10.242.238.90">>,<0.19991.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:49:49.040,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.19994.0>,{#Ref<0.0.0.219284>,<0.19999.0>}} [error_logger:info,2014-08-19T16:49:49.040,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.19999.0>}, {name, {new_child_id, [1010,1013,1019,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1010,1013,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:49.042,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 755 state to active [ns_server:debug,2014-08-19T16:49:49.044,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1010,1013,1019,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.044,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20000.0> [ns_server:debug,2014-08-19T16:49:49.045,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.049,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.049,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3753 us [ns_server:debug,2014-08-19T16:49:49.050,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:49:49.051,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1013, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:49:49.057,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/759. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:49.057,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",759,active,1} [ns_server:info,2014-08-19T16:49:49.061,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 758 state to active [ns_server:debug,2014-08-19T16:49:49.072,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.081,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.081,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8607 us [ns_server:info,2014-08-19T16:49:49.081,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 748 state to active [ns_server:debug,2014-08-19T16:49:49.082,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{502, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.085,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.097,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:49:49.100,ns_1@10.242.238.90:<0.19556.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.100,ns_1@10.242.238.90:<0.19556.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.100,ns_1@10.242.238.90:<0.20002.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.100,ns_1@10.242.238.90:<0.20002.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.100,ns_1@10.242.238.90:<0.19556.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:49.101,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.101,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1680 us [ns_server:debug,2014-08-19T16:49:49.102,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.103,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{763, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:49:49.107,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/755. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:49.107,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",755,active,1} [rebalance:debug,2014-08-19T16:49:49.126,ns_1@10.242.238.90:<0.19609.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.126,ns_1@10.242.238.90:<0.19609.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.126,ns_1@10.242.238.90:<0.20004.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.127,ns_1@10.242.238.90:<0.20004.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.127,ns_1@10.242.238.90:<0.19609.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:49.129,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.133,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3374 us [ns_server:debug,2014-08-19T16:49:49.133,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.133,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.134,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{500, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.136,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1006 state to replica [ns_server:info,2014-08-19T16:49:49.136,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1006,1010,1013,1019,1022,1023] ([1006], []) [ns_server:debug,2014-08-19T16:49:49.139,ns_1@10.242.238.90:<0.20005.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1006,1010,1013,1019,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.219627>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1006,1010,1013,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.140,ns_1@10.242.238.90:<0.20005.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.19999.0> [ns_server:info,2014-08-19T16:49:49.140,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [views:debug,2014-08-19T16:49:49.141,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/758. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:49.141,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",758,active,1} [ns_server:info,2014-08-19T16:49:49.148,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1006,1},{1010,1},{1013,1},{1019,1},{1022,1},{1023,1}] [ns_server:info,2014-08-19T16:49:49.148,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
[ns_server:info,2014-08-19T16:49:49.148,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.149,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.149,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.149,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.149,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.149,ns_1@10.242.238.90:<0.20008.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.149,ns_1@10.242.238.90:<0.20008.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.149,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.149,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.149,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.150,ns_1@10.242.238.90:<0.19999.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.150,ns_1@10.242.238.90:<0.20005.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.19999.0> [ns_server:debug,2014-08-19T16:49:49.150,ns_1@10.242.238.90:<0.20005.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.150,ns_1@10.242.238.90:<0.20010.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.150,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.19999.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20000.0>,<<"cut off">>,<<"cut off">>,[],19,false,false,0, {1408,452589,148946}, completed, {<0.20005.0>,#Ref<0.0.0.219641>}, <<"replication_ns_1@10.242.238.90">>,<0.19999.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:49:49.151,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20005.0>,{#Ref<0.0.0.219629>,<0.20010.0>}} [error_logger:info,2014-08-19T16:49:49.151,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20010.0>}, {name, {new_child_id, [1006,1010,1013,1019,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1006,1010,1013,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:49.151,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 749 state to active [ns_server:debug,2014-08-19T16:49:49.155,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1006,1010,1013,1019,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:debug,2014-08-19T16:49:49.158,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.160,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.161,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1006, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:49.164,ns_1@10.242.238.90:<0.19060.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:49:49.173,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20011.0> [ns_server:debug,2014-08-19T16:49:49.173,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 14 us [ns_server:debug,2014-08-19T16:49:49.173,ns_1@10.242.238.90:<0.19060.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.173,ns_1@10.242.238.90:<0.20013.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.173,ns_1@10.242.238.90:<0.20013.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.175,ns_1@10.242.238.90:<0.19060.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:49.175,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:49.176,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 757 state to active [ns_server:info,2014-08-19T16:49:49.178,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1008 state to replica [ns_server:info,2014-08-19T16:49:49.179,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1006,1008,1010,1013,1019,1022,1023] ([1008], []) [ns_server:debug,2014-08-19T16:49:49.179,ns_1@10.242.238.90:<0.20014.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1006,1008,1010,1013,1019,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.219836>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1006,1008,1010,1013,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.180,ns_1@10.242.238.90:<0.20014.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20010.0> [ns_server:info,2014-08-19T16:49:49.180,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.183,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1006,1},{1008,1},{1010,1},{1013,1},{1019,1},{1022,1},{1023,1}] [ns_server:info,2014-08-19T16:49:49.184,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.184,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.184,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.184,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.184,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.184,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.184,ns_1@10.242.238.90:<0.20016.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.184,ns_1@10.242.238.90:<0.20016.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.185,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:49.185,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.185,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.185,ns_1@10.242.238.90:<0.20010.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.185,ns_1@10.242.238.90:<0.20014.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20010.0> [ns_server:debug,2014-08-19T16:49:49.185,ns_1@10.242.238.90:<0.20014.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.185,ns_1@10.242.238.90:<0.20018.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.185,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20010.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20011.0>,<<"cut off">>,<<"cut off">>,[],22,false,false,0, {1408,452589,184229}, completed, {<0.20014.0>,#Ref<0.0.0.219849>}, <<"replication_ns_1@10.242.238.90">>,<0.20010.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:49:49.186,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20014.0>,{#Ref<0.0.0.219838>,<0.20018.0>}} [error_logger:info,2014-08-19T16:49:49.186,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20018.0>}, {name, {new_child_id, [1006,1008,1010,1013,1019,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1006,1008,1010,1013,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.189,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1006,1008,1010,1013,1019,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.189,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20019.0> [views:debug,2014-08-19T16:49:49.192,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/748. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:49:49.192,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",748,active,1} [ns_server:debug,2014-08-19T16:49:49.193,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.195,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2481 us [ns_server:debug,2014-08-19T16:49:49.195,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.196,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.196,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1008, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.203,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1016 state to replica [ns_server:info,2014-08-19T16:49:49.203,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1006,1008,1010,1013,1016,1019,1022,1023] ([1016], []) [rebalance:debug,2014-08-19T16:49:49.204,ns_1@10.242.238.90:<0.19464.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.204,ns_1@10.242.238.90:<0.19464.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.204,ns_1@10.242.238.90:<0.20020.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.205,ns_1@10.242.238.90:<0.20020.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.205,ns_1@10.242.238.90:<0.19464.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:49.206,ns_1@10.242.238.90:<0.20021.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1006,1008,1010,1013,1016,1019,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.220009>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1006,1008,1010,1013,1016,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.207,ns_1@10.242.238.90:<0.20021.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20018.0> [ns_server:info,2014-08-19T16:49:49.207,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.211,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1006,1},{1008,1},{1010,1},{1013,1},{1016,1},{1019,1},{1022,1},{1023,1}] [ns_server:info,2014-08-19T16:49:49.211,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.211,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.211,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.211,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.212,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.212,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.212,ns_1@10.242.238.90:<0.20024.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.212,ns_1@10.242.238.90:<0.20024.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.212,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.212,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.212,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.212,ns_1@10.242.238.90:<0.20018.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:49:49.212,ns_1@10.242.238.90:<0.20021.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20018.0> [ns_server:debug,2014-08-19T16:49:49.213,ns_1@10.242.238.90:<0.20021.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.213,ns_1@10.242.238.90:<0.20026.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.213,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20018.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20019.0>,<<"cut off">>,<<"cut off">>,[],25,false,false,0, {1408,452589,211688}, completed, {<0.20021.0>,#Ref<0.0.0.220023>}, <<"replication_ns_1@10.242.238.90">>,<0.20018.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:49:49.214,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20021.0>,{#Ref<0.0.0.220011>,<0.20026.0>}} [error_logger:info,2014-08-19T16:49:49.214,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20026.0>}, {name, {new_child_id, [1006,1008,1010,1013,1016,1019,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1006,1008,1010,1013,1016,1019,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.217,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1006,1008,1010,1013,1016,1019,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.218,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20027.0> [ns_server:debug,2014-08-19T16:49:49.220,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.221,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.222,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1252 us [ns_server:debug,2014-08-19T16:49:49.222,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:49.222,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 754 state to active [ns_server:debug,2014-08-19T16:49:49.223,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1016, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, 
{flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:49:49.226,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/757. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:49.226,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",757,active,1} [ns_server:info,2014-08-19T16:49:49.236,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 765 state to active [ns_server:debug,2014-08-19T16:49:49.240,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.248,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7788 us [ns_server:debug,2014-08-19T16:49:49.248,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.249,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.250,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{759, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.253,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1015 state to replica [ns_server:info,2014-08-19T16:49:49.253,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1006,1008,1010,1013,1015,1016,1019,1022,1023] ([1015], []) [ns_server:debug,2014-08-19T16:49:49.254,ns_1@10.242.238.90:<0.20029.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1006,1008,1010,1013,1015,1016,1019,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.220220>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1006,1008,1010,1013,1015,1016,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.254,ns_1@10.242.238.90:<0.20029.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20026.0> [ns_server:info,2014-08-19T16:49:49.255,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.263,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: 
[{1006,1}, {1008,1}, {1010,1}, {1013,1}, {1015,1}, {1016,1}, {1019,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.263,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.263,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.264,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.264,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.264,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.264,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.264,ns_1@10.242.238.90:<0.20031.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.264,ns_1@10.242.238.90:<0.20031.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.264,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.264,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.264,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.265,ns_1@10.242.238.90:<0.20026.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.265,ns_1@10.242.238.90:<0.20029.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20026.0> [ns_server:debug,2014-08-19T16:49:49.265,ns_1@10.242.238.90:<0.20029.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.265,ns_1@10.242.238.90:<0.20033.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.265,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20026.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20027.0>,<<"cut off">>,<<"cut off">>,[],28,false,false,0, {1408,452589,263939}, completed, {<0.20029.0>,#Ref<0.0.0.220233>}, <<"replication_ns_1@10.242.238.90">>,<0.20026.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:49:49.265,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20029.0>,{#Ref<0.0.0.220222>,<0.20033.0>}} [error_logger:info,2014-08-19T16:49:49.265,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20033.0>}, {name, {new_child_id, [1006,1008,1010,1013,1015,1016,1019,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1006,1008,1010,1013,1015,1016,1019,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:49:49.266,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 747 state to active [ns_server:debug,2014-08-19T16:49:49.269,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1006,1008,1010,1013,1015,1016,1019,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.269,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20035.0> [ns_server:info,2014-08-19T16:49:49.270,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 752 state to active [ns_server:debug,2014-08-19T16:49:49.273,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:49:49.273,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/749. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:49:49.274,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",749,active,1} [ns_server:debug,2014-08-19T16:49:49.276,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.276,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3465 us [ns_server:debug,2014-08-19T16:49:49.277,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1015, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.278,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.292,ns_1@10.242.238.90:<0.19035.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.293,ns_1@10.242.238.90:<0.19035.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.293,ns_1@10.242.238.90:<0.20036.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.293,ns_1@10.242.238.90:<0.20036.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.293,ns_1@10.242.238.90:<0.19035.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:49.299,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.301,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.301,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1599 us [ns_server:debug,2014-08-19T16:49:49.301,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.302,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{755, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:49.303,ns_1@10.242.238.90:<0.19187.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.303,ns_1@10.242.238.90:<0.19187.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.303,ns_1@10.242.238.90:<0.20038.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.303,ns_1@10.242.238.90:<0.20038.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.303,ns_1@10.242.238.90:<0.19187.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:49.316,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:49:49.319,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 761 state to active [ns_server:debug,2014-08-19T16:49:49.320,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1926 us [ns_server:debug,2014-08-19T16:49:49.320,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.321,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.321,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{758, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:49:49.323,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/765. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:49.323,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",765,active,1} [ns_server:debug,2014-08-19T16:49:49.339,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:49:49.342,ns_1@10.242.238.90:<0.18906.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.343,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4282 us [ns_server:debug,2014-08-19T16:49:49.343,ns_1@10.242.238.90:<0.18906.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.344,ns_1@10.242.238.90:<0.20040.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.344,ns_1@10.242.238.90:<0.20040.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.347,ns_1@10.242.238.90:<0.18906.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:49.347,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:49.347,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1005 state to replica [ns_server:info,2014-08-19T16:49:49.347,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1005,1006,1008,1010,1013,1015,1016,1019,1022,1023] ([1005], []) [ns_server:debug,2014-08-19T16:49:49.347,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.349,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{748, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.349,ns_1@10.242.238.90:<0.20041.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1005,1006,1008,1010,1013,1015,1016,1019,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.220562>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1005,1006,1008,1010,1013,1015,1016,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.349,ns_1@10.242.238.90:<0.20041.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20033.0> [ns_server:info,2014-08-19T16:49:49.350,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:49:49.356,ns_1@10.242.238.90:<0.19393.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.356,ns_1@10.242.238.90:<0.19393.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.357,ns_1@10.242.238.90:<0.20043.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.357,ns_1@10.242.238.90:<0.20043.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.357,ns_1@10.242.238.90:<0.19393.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:49:49.358,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1005,1}, {1006,1}, {1008,1}, {1010,1}, {1013,1}, {1015,1}, {1016,1}, {1019,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.358,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.359,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.359,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.359,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.359,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.359,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.359,ns_1@10.242.238.90:<0.20045.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.359,ns_1@10.242.238.90:<0.20045.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.359,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.359,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.360,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.360,ns_1@10.242.238.90:<0.20033.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.360,ns_1@10.242.238.90:<0.20041.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20033.0> [ns_server:debug,2014-08-19T16:49:49.360,ns_1@10.242.238.90:<0.20041.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.360,ns_1@10.242.238.90:<0.20047.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.360,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20033.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20035.0>,<<"cut off">>,<<"cut off">>,[],31,false,false,0, {1408,452589,359125}, completed, {<0.20041.0>,#Ref<0.0.0.220577>}, <<"replication_ns_1@10.242.238.90">>,<0.20033.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[error_logger:info,2014-08-19T16:49:49.360,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20047.0>}, {name, {new_child_id, [1005,1006,1008,1010,1013,1015,1016,1019,1022, 1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1005,1006,1008,1010,1013,1015,1016,1019, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.361,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20041.0>,{#Ref<0.0.0.220564>,<0.20047.0>}} [ns_server:debug,2014-08-19T16:49:49.364,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1005,1006,1008,1010,1013,1015,1016,1019,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.364,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20048.0> [ns_server:debug,2014-08-19T16:49:49.369,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.370,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1105 us [ns_server:debug,2014-08-19T16:49:49.370,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.371,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.371,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1005, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.373,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1004 state to replica [ns_server:info,2014-08-19T16:49:49.374,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1004,1005,1006,1008,1010,1013,1015,1016,1019,1022,1023] ([1004], []) [rebalance:debug,2014-08-19T16:49:49.374,ns_1@10.242.238.90:<0.19654.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.374,ns_1@10.242.238.90:<0.19654.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.374,ns_1@10.242.238.90:<0.20055.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception 
[ns_server:debug,2014-08-19T16:49:49.374,ns_1@10.242.238.90:<0.20055.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.375,ns_1@10.242.238.90:<0.19654.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.374,ns_1@10.242.238.90:<0.20056.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1004,1005,1006,1008,1010,1013,1015,1016,1019, 1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.220722>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1004,1005,1006,1008,1010,1013,1015,1016,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.375,ns_1@10.242.238.90:<0.20056.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20047.0> [ns_server:info,2014-08-19T16:49:49.375,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.379,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1004,1}, {1005,1}, {1006,1}, {1008,1}, {1010,1}, {1013,1}, {1015,1}, {1016,1}, {1019,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.379,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.379,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.379,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.379,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.380,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.380,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.380,ns_1@10.242.238.90:<0.20058.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.380,ns_1@10.242.238.90:<0.20058.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.380,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.380,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.380,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.380,ns_1@10.242.238.90:<0.20047.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:49:49.380,ns_1@10.242.238.90:<0.20056.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20047.0> [ns_server:debug,2014-08-19T16:49:49.381,ns_1@10.242.238.90:<0.20056.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.381,ns_1@10.242.238.90:<0.20060.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.381,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20047.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20048.0>,<<"cut off">>,<<"cut off">>,[],34,false,false,0, {1408,452589,379614}, completed, {<0.20056.0>,#Ref<0.0.0.220745>}, <<"replication_ns_1@10.242.238.90">>,<0.20047.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:49:49.381,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20056.0>,{#Ref<0.0.0.220724>,<0.20060.0>}} [error_logger:info,2014-08-19T16:49:49.381,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20060.0>}, {name, {new_child_id, [1004,1005,1006,1008,1010,1013,1015,1016,1019, 1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1004,1005,1006,1008,1010,1013,1015,1016, 1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.385,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1004,1005,1006,1008,1010,1013,1015,1016,1019,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.385,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20061.0> [ns_server:debug,2014-08-19T16:49:49.391,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:49:49.395,ns_1@10.242.238.90:<0.19266.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.395,ns_1@10.242.238.90:<0.19266.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.396,ns_1@10.242.238.90:<0.20063.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.396,ns_1@10.242.238.90:<0.20063.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.396,ns_1@10.242.238.90:<0.19266.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:49.397,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5947 us [ns_server:debug,2014-08-19T16:49:49.397,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.397,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.398,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1004, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.401,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1017 state to replica [ns_server:info,2014-08-19T16:49:49.401,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1004,1005,1006,1008,1010,1013,1015,1016,1017,1019,1022,1023] ([1017], []) [rebalance:debug,2014-08-19T16:49:49.408,ns_1@10.242.238.90:<0.18931.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.408,ns_1@10.242.238.90:<0.20064.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1004,1005,1006,1008,1010,1013,1015,1016,1017, 1019,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.220888>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1004,1005,1006,1008,1010,1013,1015,1016,1017,1019,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.408,ns_1@10.242.238.90:<0.20064.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20060.0> [ns_server:debug,2014-08-19T16:49:49.408,ns_1@10.242.238.90:<0.18931.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:info,2014-08-19T16:49:49.408,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:debug,2014-08-19T16:49:49.408,ns_1@10.242.238.90:<0.20066.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.408,ns_1@10.242.238.90:<0.20066.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [views:debug,2014-08-19T16:49:49.408,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/747. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:49:49.408,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",747,active,1} [rebalance:info,2014-08-19T16:49:49.408,ns_1@10.242.238.90:<0.18931.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:49:49.419,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1004,1}, {1005,1}, {1006,1}, {1008,1}, {1010,1}, {1013,1}, {1015,1}, {1016,1}, {1017,1}, {1019,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.420,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.420,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.420,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.421,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.421,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.421,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.421,ns_1@10.242.238.90:<0.20067.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.421,ns_1@10.242.238.90:<0.20067.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.421,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.421,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.422,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.422,ns_1@10.242.238.90:<0.20060.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.422,ns_1@10.242.238.90:<0.20064.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20060.0> [ns_server:debug,2014-08-19T16:49:49.422,ns_1@10.242.238.90:<0.20064.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.422,ns_1@10.242.238.90:<0.20069.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.422,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20060.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20061.0>,<<"cut off">>,<<"cut off">>,[],37,false,false,0, {1408,452589,420766}, completed, {<0.20064.0>,#Ref<0.0.0.220921>}, <<"replication_ns_1@10.242.238.90">>,<0.20060.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:49:49.423,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20064.0>,{#Ref<0.0.0.220895>,<0.20069.0>}} [error_logger:info,2014-08-19T16:49:49.423,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20069.0>}, {name, {new_child_id, [1004,1005,1006,1008,1010,1013,1015,1016,1017, 1019,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1004,1005,1006,1008,1010,1013,1015,1016, 1017,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.426,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1004,1005,1006,1008,1010,1013,1015,1016,1017,1019,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.427,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20070.0> [rebalance:debug,2014-08-19T16:49:49.427,ns_1@10.242.238.90:<0.19301.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.427,ns_1@10.242.238.90:<0.19301.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.427,ns_1@10.242.238.90:<0.20072.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.427,ns_1@10.242.238.90:<0.20072.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.427,ns_1@10.242.238.90:<0.19301.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:49.429,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.432,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.432,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3523 us [ns_server:debug,2014-08-19T16:49:49.433,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.434,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1017, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:49.448,ns_1@10.242.238.90:<0.19679.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.448,ns_1@10.242.238.90:<0.19679.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.448,ns_1@10.242.238.90:<0.20073.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.448,ns_1@10.242.238.90:<0.20073.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.449,ns_1@10.242.238.90:<0.19679.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:49:49.454,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.457,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.457,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2546 us [ns_server:debug,2014-08-19T16:49:49.457,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.458,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{749, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.460,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 756 state to active [ns_server:debug,2014-08-19T16:49:49.473,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:49:49.474,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/754. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:49:49.474,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",754,active,1} [ns_server:debug,2014-08-19T16:49:49.479,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.479,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5374 us [ns_server:debug,2014-08-19T16:49:49.479,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:49.482,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1007 state to replica [ns_server:info,2014-08-19T16:49:49.482,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1004,1005,1006,1007,1008,1010,1013,1015,1016,1017,1019,1022,1023] ([1007], []) [ns_server:debug,2014-08-19T16:49:49.480,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{757, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.484,ns_1@10.242.238.90:<0.20075.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1004,1005,1006,1007,1008,1010,1013,1015,1016, 1017,1019,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.221183>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1004,1005,1006,1007,1008,1010,1013,1015,1016,1017,1019,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.484,ns_1@10.242.238.90:<0.20075.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20069.0> [ns_server:info,2014-08-19T16:49:49.484,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.497,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1010,1}, {1013,1}, {1015,1}, {1016,1}, {1017,1}, {1019,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.497,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.498,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.498,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.498,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.498,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.499,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.499,ns_1@10.242.238.90:<0.20078.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.499,ns_1@10.242.238.90:<0.20078.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.499,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.499,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.499,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.499,ns_1@10.242.238.90:<0.20069.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.499,ns_1@10.242.238.90:<0.20075.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20069.0> [ns_server:debug,2014-08-19T16:49:49.500,ns_1@10.242.238.90:<0.20075.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.500,ns_1@10.242.238.90:<0.20080.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.500,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20069.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20070.0>,<<"cut off">>,<<"cut off">>,[],40,false,false,0, {1408,452589,498578}, completed, {<0.20075.0>,#Ref<0.0.0.221198>}, <<"replication_ns_1@10.242.238.90">>,<0.20069.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:49:49.500,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20075.0>,{#Ref<0.0.0.221185>,<0.20080.0>}} [error_logger:info,2014-08-19T16:49:49.500,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20080.0>}, {name, {new_child_id, [1004,1005,1006,1007,1008,1010,1013,1015,1016, 1017,1019,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1004,1005,1006,1007,1008,1010,1013,1015, 1016,1017,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.503,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1004,1005,1006,1007,1008,1010,1013,1015,1016,1017,1019,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.504,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20081.0> [ns_server:debug,2014-08-19T16:49:49.506,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.509,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3037 us [ns_server:debug,2014-08-19T16:49:49.509,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.510,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1007, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.511,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.528,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.531,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.532,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3459 us [ns_server:debug,2014-08-19T16:49:49.532,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:49:49.532,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/752. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:49:49.533,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",752,active,1} [ns_server:debug,2014-08-19T16:49:49.533,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{754, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.581,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:49:49.583,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/761. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:49.583,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",761,active,1} [ns_server:debug,2014-08-19T16:49:49.588,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7370 us [ns_server:debug,2014-08-19T16:49:49.588,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.589,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.590,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{765, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.613,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.614,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1402 us [ns_server:debug,2014-08-19T16:49:49.614,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.615,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.616,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{747, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, 
{replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.638,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.641,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.641,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3154 us [ns_server:debug,2014-08-19T16:49:49.642,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:49:49.642,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/756. Updated state: active (1) [ns_server:debug,2014-08-19T16:49:49.642,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",756,active,1} [ns_server:debug,2014-08-19T16:49:49.642,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{752, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.647,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1018 state to replica [ns_server:info,2014-08-19T16:49:49.648,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1004,1005,1006,1007,1008,1010,1013,1015,1016,1017,1018,1019,1022,1023] ([1018], []) [ns_server:debug,2014-08-19T16:49:49.650,ns_1@10.242.238.90:<0.20086.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1004,1005,1006,1007,1008,1010,1013,1015,1016, 1017,1018,1019,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.221506>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1004,1005,1006,1007,1008,1010,1013,1015,1016,1017,1018,1019, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.651,ns_1@10.242.238.90:<0.20086.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20080.0> [ns_server:info,2014-08-19T16:49:49.651,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.658,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream 
`replication_ns_1@10.242.238.90`: [{1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1010,1}, {1013,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.659,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.659,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.659,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.659,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.659,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.660,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.660,ns_1@10.242.238.90:<0.20088.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.660,ns_1@10.242.238.90:<0.20088.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.660,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.660,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.660,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.660,ns_1@10.242.238.90:<0.20080.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.660,ns_1@10.242.238.90:<0.20086.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20080.0> [ns_server:debug,2014-08-19T16:49:49.661,ns_1@10.242.238.90:<0.20086.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.661,ns_1@10.242.238.90:<0.20090.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.661,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20080.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20081.0>,<<"cut off">>,<<"cut off">>,[],43,false,false,0, {1408,452589,659672}, completed, {<0.20086.0>,#Ref<0.0.0.221519>}, <<"replication_ns_1@10.242.238.90">>,<0.20080.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:49:49.661,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20086.0>,{#Ref<0.0.0.221508>,<0.20090.0>}} [error_logger:info,2014-08-19T16:49:49.661,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20090.0>}, {name, {new_child_id, [1004,1005,1006,1007,1008,1010,1013,1015,1016, 1017,1018,1019,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1004,1005,1006,1007,1008,1010,1013,1015, 1016,1017,1018,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.665,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1004,1005,1006,1007,1008,1010,1013,1015,1016,1017,1018,1019,1022, 1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.665,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20092.0> [ns_server:debug,2014-08-19T16:49:49.668,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.671,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.671,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2628 us [ns_server:debug,2014-08-19T16:49:49.671,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.672,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1018, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.674,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1014 state to replica [ns_server:info,2014-08-19T16:49:49.674,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1004,1005,1006,1007,1008,1010,1013,1014,1015,1016,1017,1018,1019,1022,1023] ([1014], []) [ns_server:debug,2014-08-19T16:49:49.675,ns_1@10.242.238.90:<0.20093.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1004,1005,1006,1007,1008,1010,1013,1014,1015, 1016,1017,1018,1019,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.221642>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, 
{on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1004,1005,1006,1007,1008,1010,1013,1014,1015,1016,1017,1018, 1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.675,ns_1@10.242.238.90:<0.20093.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20090.0> [ns_server:info,2014-08-19T16:49:49.675,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.679,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1010,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.679,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.679,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.680,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.680,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.680,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.680,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.680,ns_1@10.242.238.90:<0.20095.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.680,ns_1@10.242.238.90:<0.20095.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.680,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.680,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.680,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.681,ns_1@10.242.238.90:<0.20090.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.681,ns_1@10.242.238.90:<0.20093.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20090.0> [ns_server:debug,2014-08-19T16:49:49.681,ns_1@10.242.238.90:<0.20093.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.681,ns_1@10.242.238.90:<0.20097.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.681,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20090.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20092.0>,<<"cut off">>,<<"cut off">>,[],46,false,false,0, {1408,452589,679924}, completed, {<0.20093.0>,#Ref<0.0.0.221655>}, <<"replication_ns_1@10.242.238.90">>,<0.20090.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:49:49.681,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20093.0>,{#Ref<0.0.0.221644>,<0.20097.0>}} [error_logger:info,2014-08-19T16:49:49.681,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20097.0>}, {name, {new_child_id, [1004,1005,1006,1007,1008,1010,1013,1014,1015, 1016,1017,1018,1019,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1004,1005,1006,1007,1008,1010,1013,1014, 1015,1016,1017,1018,1019,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.685,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1004,1005,1006,1007,1008,1010,1013,1014,1015,1016,1017,1018,1019, 1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.685,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20098.0> [ns_server:debug,2014-08-19T16:49:49.686,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.689,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2657 us [ns_server:debug,2014-08-19T16:49:49.689,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.690,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.690,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1014, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.712,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.715,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing 
replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.715,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3049 us [ns_server:debug,2014-08-19T16:49:49.716,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.716,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{761, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.718,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1021 state to replica [ns_server:info,2014-08-19T16:49:49.719,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1004,1005,1006,1007,1008,1010,1013,1014,1015,1016,1017,1018,1019,1021,1022, 1023] ([1021], []) [ns_server:debug,2014-08-19T16:49:49.721,ns_1@10.242.238.90:<0.20100.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1004,1005,1006,1007,1008,1010,1013,1014,1015, 1016,1017,1018,1019,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.221806>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1004,1005,1006,1007,1008,1010,1013,1014,1015,1016,1017,1018, 1019,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.721,ns_1@10.242.238.90:<0.20100.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20097.0> [ns_server:info,2014-08-19T16:49:49.721,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.730,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1010,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.730,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.731,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.731,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
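Each "Changing vbucket filter" entry above prints the new filter as a sorted list of {VBucketId, CheckpointId} pairs (checkpoint id 1 throughout this section). A small illustrative helper, with assumed names and not taken from ns_server, for extending such a list with a newly acquired vbucket and producing that shape:

    %% Hypothetical helper: keep the vbucket list sorted and pair each id
    %% with a checkpoint id, mirroring the [{VBucket,Checkpoint}] lists above.
    -module(filter_sketch).
    -export([add_vbucket/2, to_filter/1]).

    add_vbucket(VBucket, VBuckets) ->
        ordsets:add_element(VBucket, ordsets:from_list(VBuckets)).

    to_filter(VBuckets) ->
        [{V, 1} || V <- VBuckets].   %% checkpoint id 1, as observed above

For example, filter_sketch:to_filter(filter_sketch:add_vbucket(1021, [1004,1005,1006])) returns [{1004,1},{1005,1},{1006,1},{1021,1}], the same shape as the filter logged at 16:49:49.730.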
[ns_server:debug,2014-08-19T16:49:49.731,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.731,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.731,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.731,ns_1@10.242.238.90:<0.20103.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.731,ns_1@10.242.238.90:<0.20103.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.731,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.732,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.732,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.732,ns_1@10.242.238.90:<0.20097.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.732,ns_1@10.242.238.90:<0.20100.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20097.0> [ns_server:debug,2014-08-19T16:49:49.732,ns_1@10.242.238.90:<0.20100.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.732,ns_1@10.242.238.90:<0.20105.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.733,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20097.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20098.0>,<<"cut off">>,<<"cut off">>,[],49,false,false,0, {1408,452589,730983}, completed, {<0.20100.0>,#Ref<0.0.0.221819>}, <<"replication_ns_1@10.242.238.90">>,<0.20097.0>, {had_backfill,false,undefined,[]}, completed,false}. 
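The confirm_downstream and confirm_sent_messages steps above describe a simple barrier: a helper process sends one opaque marker down the connection, and the transition proceeds only after that marker's acknowledgement comes back, which implies everything sent before it has been received. A sketch of that send-marker-and-wait pattern under hypothetical names (ack_barrier_sketch), not the actual ebucketmigrator implementation:

    %% Hypothetical sketch of an "opaque marker + ack" barrier.
    -module(ack_barrier_sketch).
    -export([confirm/2, echo_loop/0]).

    confirm(PeerPid, TimeoutMs) ->
        Marker = {opaque, make_ref(), self()},
        PeerPid ! Marker,
        receive
            {ack, Marker} -> ok              %% peer echoed our marker back
        after TimeoutMs -> {error, timeout}
        end.

    %% A peer that acknowledges markers in the order it receives them.
    echo_loop() ->
        receive
            {opaque, _Ref, From} = Marker ->
                From ! {ack, Marker},
                echo_loop();
            stop -> ok
        end.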
[ns_server:debug,2014-08-19T16:49:49.733,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20100.0>,{#Ref<0.0.0.221808>,<0.20105.0>}} [error_logger:info,2014-08-19T16:49:49.733,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20105.0>}, {name, {new_child_id, [1004,1005,1006,1007,1008,1010,1013,1014,1015, 1016,1017,1018,1019,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1004,1005,1006,1007,1008,1010,1013,1014, 1015,1016,1017,1018,1019,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.736,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1004,1005,1006,1007,1008,1010,1013,1014,1015,1016,1017,1018,1019, 1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.737,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20106.0> [ns_server:debug,2014-08-19T16:49:49.737,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.745,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7952 us [ns_server:debug,2014-08-19T16:49:49.745,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.746,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.747,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1021, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.748,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1009 state to replica [ns_server:info,2014-08-19T16:49:49.749,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1004,1005,1006,1007,1008,1009,1010,1013,1014,1015,1016,1017,1018,1019,1021, 1022,1023] ([1009], []) [ns_server:debug,2014-08-19T16:49:49.750,ns_1@10.242.238.90:<0.20107.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1004,1005,1006,1007,1008,1009,1010,1013,1014, 1015,1016,1017,1018,1019,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.221942>} Args:[{"10.242.238.91",11209}, 
{"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1004,1005,1006,1007,1008,1009,1010,1013,1014,1015,1016,1017, 1018,1019,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.750,ns_1@10.242.238.90:<0.20107.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20105.0> [ns_server:info,2014-08-19T16:49:49.750,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.754,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.754,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.755,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.755,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.755,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.755,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.755,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.755,ns_1@10.242.238.90:<0.20109.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.755,ns_1@10.242.238.90:<0.20109.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.756,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.756,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.756,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.756,ns_1@10.242.238.90:<0.20105.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.756,ns_1@10.242.238.90:<0.20107.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20105.0> [ns_server:debug,2014-08-19T16:49:49.756,ns_1@10.242.238.90:<0.20107.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.756,ns_1@10.242.238.90:<0.20112.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.756,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20105.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20106.0>,<<"cut off">>,<<"cut off">>,[],52,false,false,0, {1408,452589,755260}, completed, {<0.20107.0>,#Ref<0.0.0.221955>}, <<"replication_ns_1@10.242.238.90">>,<0.20105.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:49:49.757,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20107.0>,{#Ref<0.0.0.221944>,<0.20112.0>}} [error_logger:info,2014-08-19T16:49:49.757,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20112.0>}, {name, {new_child_id, [1004,1005,1006,1007,1008,1009,1010,1013,1014, 1015,1016,1017,1018,1019,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1004,1005,1006,1007,1008,1009,1010,1013, 1014,1015,1016,1017,1018,1019,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.761,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1004,1005,1006,1007,1008,1009,1010,1013,1014,1015,1016,1017,1018, 1019,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.761,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20113.0> [ns_server:debug,2014-08-19T16:49:49.763,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.767,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3295 us [ns_server:debug,2014-08-19T16:49:49.767,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.767,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.768,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1009, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.770,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1003 state to replica [ns_server:info,2014-08-19T16:49:49.770,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change 
replication from 'ns_1@10.242.238.91' to have [1003,1004,1005,1006,1007,1008,1009,1010,1013,1014,1015,1016,1017,1018,1019, 1021,1022,1023] ([1003], []) [ns_server:debug,2014-08-19T16:49:49.777,ns_1@10.242.238.90:<0.20114.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1003,1004,1005,1006,1007,1008,1009,1010,1013, 1014,1015,1016,1017,1018,1019,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.222085>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1003,1004,1005,1006,1007,1008,1009,1010,1013,1014,1015,1016, 1017,1018,1019,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.777,ns_1@10.242.238.90:<0.20114.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20112.0> [ns_server:info,2014-08-19T16:49:49.777,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.787,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.787,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.788,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.788,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.788,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.788,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.788,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.788,ns_1@10.242.238.90:<0.20116.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.788,ns_1@10.242.238.90:<0.20116.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.788,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.788,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.788,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.789,ns_1@10.242.238.90:<0.20112.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:49:49.789,ns_1@10.242.238.90:<0.20114.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20112.0> [ns_server:debug,2014-08-19T16:49:49.789,ns_1@10.242.238.90:<0.20114.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.789,ns_1@10.242.238.90:<0.20118.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.789,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20112.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20113.0>,<<"cut off">>,<<"cut off">>,[],55,false,false,0, {1408,452589,788040}, completed, {<0.20114.0>,#Ref<0.0.0.222099>}, <<"replication_ns_1@10.242.238.90">>,<0.20112.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:49:49.790,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20114.0>,{#Ref<0.0.0.222087>,<0.20118.0>}} [error_logger:info,2014-08-19T16:49:49.789,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20118.0>}, {name, {new_child_id, [1003,1004,1005,1006,1007,1008,1009,1010,1013, 1014,1015,1016,1017,1018,1019,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1003,1004,1005,1006,1007,1008,1009,1010, 1013,1014,1015,1016,1017,1018,1019,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.793,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1003,1004,1005,1006,1007,1008,1009,1010,1013,1014,1015,1016,1017, 1018,1019,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.794,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20120.0> [ns_server:debug,2014-08-19T16:49:49.794,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.798,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3602 us [ns_server:debug,2014-08-19T16:49:49.798,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.798,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.799,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1003, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, 
{auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.805,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1012 state to replica [ns_server:info,2014-08-19T16:49:49.805,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1003,1004,1005,1006,1007,1008,1009,1010,1012,1013,1014,1015,1016,1017,1018, 1019,1021,1022,1023] ([1012], []) [ns_server:debug,2014-08-19T16:49:49.806,ns_1@10.242.238.90:<0.20121.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1003,1004,1005,1006,1007,1008,1009,1010,1012, 1013,1014,1015,1016,1017,1018,1019,1021,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.222226>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1003,1004,1005,1006,1007,1008,1009,1010,1012,1013,1014,1015, 1016,1017,1018,1019,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.806,ns_1@10.242.238.90:<0.20121.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20118.0> [ns_server:info,2014-08-19T16:49:49.806,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.810,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.811,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.811,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.811,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
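Each tap_replication_manager entry above ends with an "([Added], [Removed])" pair; in this section the removed list is always empty because vbuckets are only being added to the filter. The pair can be derived by set-subtracting the old and new vbucket lists, as in this sketch (assumed names, not ns_server code):

    %% Hypothetical sketch: compute the (Added, Removed) pair printed by the
    %% "Going to change replication ..." entries above.
    -module(filter_diff_sketch).
    -export([diff/2]).

    diff(OldVBuckets, NewVBuckets) ->
        Old = ordsets:from_list(OldVBuckets),
        New = ordsets:from_list(NewVBuckets),
        {ordsets:subtract(New, Old),    %% added, e.g. [1012] above
         ordsets:subtract(Old, New)}.   %% removed, [] in every change above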
[ns_server:debug,2014-08-19T16:49:49.811,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.811,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.811,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.811,ns_1@10.242.238.90:<0.20123.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.811,ns_1@10.242.238.90:<0.20123.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.812,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.812,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.812,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.812,ns_1@10.242.238.90:<0.20118.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.812,ns_1@10.242.238.90:<0.20121.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20118.0> [ns_server:debug,2014-08-19T16:49:49.812,ns_1@10.242.238.90:<0.20121.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.812,ns_1@10.242.238.90:<0.20125.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.812,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20118.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20120.0>,<<"cut off">>,<<"cut off">>,[],58,false,false,0, {1408,452589,811224}, completed, {<0.20121.0>,#Ref<0.0.0.222239>}, <<"replication_ns_1@10.242.238.90">>,<0.20118.0>, {had_backfill,false,undefined,[]}, completed,false}. 
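The PROGRESS REPORT entries above show how each replacement replicator is registered under 'ns_vbm_new_sup-default': the child id embeds the full vbucket list and the source node ({new_child_id, VBuckets, 'ns_1@10.242.238.91'}), and the child is temporary with a 60-second shutdown, so a changed filter yields a new child rather than a supervisor restart. A sketch of a child spec with that shape, written in the modern map form and with an assumed module name (the reports above come from older tuple-style specs):

    %% Hypothetical sketch of a child spec shaped like the progress reports
    %% above: temporary worker, 60 s shutdown, id keyed by vbuckets + node.
    -module(child_spec_sketch).
    -export([child_spec/3]).

    child_spec(VBuckets, SrcNode, StartArgs) ->
        #{id       => {new_child_id, VBuckets, SrcNode},
          start    => {ebucketmigrator_srv, start_link, StartArgs},
          restart  => temporary,
          shutdown => 60000,
          type     => worker}.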
[error_logger:info,2014-08-19T16:49:49.813,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20125.0>}, {name, {new_child_id, [1003,1004,1005,1006,1007,1008,1009,1010,1012, 1013,1014,1015,1016,1017,1018,1019,1021,1022, 1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1003,1004,1005,1006,1007,1008,1009,1010, 1012,1013,1014,1015,1016,1017,1018,1019, 1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.813,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20121.0>,{#Ref<0.0.0.222228>,<0.20125.0>}} [ns_server:debug,2014-08-19T16:49:49.816,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1003,1004,1005,1006,1007,1008,1009,1010,1012,1013,1014,1015,1016, 1017,1018,1019,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.817,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20126.0> [ns_server:debug,2014-08-19T16:49:49.818,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.820,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.820,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1334 us [ns_server:debug,2014-08-19T16:49:49.820,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.821,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1012, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.823,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1020 state to replica [ns_server:info,2014-08-19T16:49:49.823,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1003,1004,1005,1006,1007,1008,1009,1010,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023] ([1020], []) [ns_server:debug,2014-08-19T16:49:49.826,ns_1@10.242.238.90:<0.20128.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1003,1004,1005,1006,1007,1008,1009,1010,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 
'ns_1@10.242.238.91'}, #Ref<0.0.0.222360>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1003,1004,1005,1006,1007,1008,1009,1010,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.827,ns_1@10.242.238.90:<0.20128.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20125.0> [ns_server:info,2014-08-19T16:49:49.827,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.831,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.831,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.831,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.831,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.832,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.832,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.832,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.832,ns_1@10.242.238.90:<0.20130.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.832,ns_1@10.242.238.90:<0.20130.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.832,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.832,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.832,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.832,ns_1@10.242.238.90:<0.20125.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:49:49.832,ns_1@10.242.238.90:<0.20128.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20125.0> [ns_server:debug,2014-08-19T16:49:49.833,ns_1@10.242.238.90:<0.20128.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.833,ns_1@10.242.238.90:<0.20132.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.833,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20125.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20126.0>,<<"cut off">>,<<"cut off">>,[],61,false,false,0, {1408,452589,831831}, completed, {<0.20128.0>,#Ref<0.0.0.222373>}, <<"replication_ns_1@10.242.238.90">>,<0.20125.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:49:49.833,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20128.0>,{#Ref<0.0.0.222362>,<0.20132.0>}} [error_logger:info,2014-08-19T16:49:49.833,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20132.0>}, {name, {new_child_id, [1003,1004,1005,1006,1007,1008,1009,1010,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1003,1004,1005,1006,1007,1008,1009,1010, 1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.837,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1003,1004,1005,1006,1007,1008,1009,1010,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.837,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20133.0> [ns_server:debug,2014-08-19T16:49:49.837,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.840,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2906 us [ns_server:debug,2014-08-19T16:49:49.841,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.842,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.843,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1020, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, 
{ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.847,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1011 state to replica [ns_server:info,2014-08-19T16:49:49.847,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023] ([1011], []) [ns_server:debug,2014-08-19T16:49:49.848,ns_1@10.242.238.90:<0.20134.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.222502>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.848,ns_1@10.242.238.90:<0.20134.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20132.0> [ns_server:info,2014-08-19T16:49:49.849,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.852,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.853,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.853,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.853,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
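The "config change: buckets" entries above appear to carry only the changed part of the vbucket map: each element has the form {VBucket, OldChain, NewChain}, where a chain seems to list the active node first followed by its replicas (undefined marking a missing replica). A small sketch, under that assumption and with hypothetical names, for reading one such delta:

    %% Hypothetical reader for one map delta as printed above, assuming the
    %% first element of a chain is the active node and the rest are replicas.
    -module(map_delta_sketch).
    -export([describe/1]).

    describe({VBucket, [OldActive | _OldReplicas], [NewActive | NewReplicas]}) ->
        #{vbucket      => VBucket,
          old_active   => OldActive,
          new_active   => NewActive,
          new_replicas => NewReplicas}.

For the 1020 entry above, describe({1020, ['ns_1@10.242.238.88', undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}) reads as the active copy moving from .88 to .91 with .90 holding the replica, which is consistent with the "Changed vbucket 1020 state to replica" entry logged on this node.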
[ns_server:debug,2014-08-19T16:49:49.853,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.854,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.854,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.854,ns_1@10.242.238.90:<0.20136.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.854,ns_1@10.242.238.90:<0.20136.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.854,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.854,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.854,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.854,ns_1@10.242.238.90:<0.20132.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:49:49.854,ns_1@10.242.238.90:<0.20134.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20132.0> [ns_server:debug,2014-08-19T16:49:49.855,ns_1@10.242.238.90:<0.20134.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.855,ns_1@10.242.238.90:<0.20138.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.855,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20132.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20133.0>,<<"cut off">>,<<"cut off">>,[],64,false,false,0, {1408,452589,853680}, completed, {<0.20134.0>,#Ref<0.0.0.222516>}, <<"replication_ns_1@10.242.238.90">>,<0.20132.0>, {had_backfill,false,undefined,[]}, completed,false}. 
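The interleaved ns_config_rep entries above report each full synchronization round in microseconds ("Fully synchronized config in 2906 us", "... in 1334 us", and so on). Erlang's timer:tc/1 returns exactly that unit, so a measurement wrapper along these lines could produce such figures (sketch only, with an assumed module name; ns_server's own timing code is not shown in this log):

    %% Hypothetical wrapper: time a zero-arity synchronization fun in
    %% microseconds, the unit used by the entries above.
    -module(sync_timing_sketch).
    -export([timed_call/1]).

    timed_call(SyncFun) when is_function(SyncFun, 0) ->
        {Micros, Result} = timer:tc(SyncFun),
        io:format("fully synchronized in ~b us~n", [Micros]),
        {Micros, Result}.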
[ns_server:debug,2014-08-19T16:49:49.855,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20134.0>,{#Ref<0.0.0.222504>,<0.20138.0>}} [error_logger:info,2014-08-19T16:49:49.855,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20138.0>}, {name, {new_child_id, [1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.862,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:debug,2014-08-19T16:49:49.862,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:49:49.862,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20140.0> [ns_server:debug,2014-08-19T16:49:49.864,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.864,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2471 us [ns_server:debug,2014-08-19T16:49:49.865,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.866,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1011, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.871,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1002 state to replica [ns_server:info,2014-08-19T16:49:49.872,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023] ([1002], []) [ns_server:debug,2014-08-19T16:49:49.874,ns_1@10.242.238.90:<0.20141.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [1002,1003,1004,1005,1006,1007,1008,1009,1010, 
1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.222640>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:49:49.875,ns_1@10.242.238.90:<0.20141.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20138.0> [ns_server:info,2014-08-19T16:49:49.875,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:49:49.878,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:49:49.879,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:49:49.879,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:49:49.879,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:49:49.879,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:49:49.879,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:49:49.879,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:49:49.879,ns_1@10.242.238.90:<0.20143.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:49:49.880,ns_1@10.242.238.90:<0.20143.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:49:49.880,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:49:49.880,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:49:49.880,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:49:49.880,ns_1@10.242.238.90:<0.20138.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:49:49.880,ns_1@10.242.238.90:<0.20141.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20138.0> [ns_server:debug,2014-08-19T16:49:49.881,ns_1@10.242.238.90:<0.20141.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:49:49.881,ns_1@10.242.238.90:<0.20145.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:49:49.881,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20138.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20140.0>,<<"cut off">>,<<"cut off">>,[],67,false,false,0, {1408,452589,879469}, completed, {<0.20141.0>,#Ref<0.0.0.222653>}, <<"replication_ns_1@10.242.238.90">>,<0.20138.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:49:49.881,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.20141.0>,{#Ref<0.0.0.222642>,<0.20145.0>}} [error_logger:info,2014-08-19T16:49:49.881,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.20145.0>}, {name, {new_child_id, [1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:49:49.884,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:49:49.885,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20146.0> [ns_server:debug,2014-08-19T16:49:49.885,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.893,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.894,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7356 us [ns_server:debug,2014-08-19T16:49:49.894,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.894,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1002, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, 
{replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.914,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:49.918,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2255 us [ns_server:debug,2014-08-19T16:49:49.918,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.919,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.919,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{756, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.933,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1001 state to replica [ns_server:info,2014-08-19T16:49:49.937,ns_1@10.242.238.90:<0.20149.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1001 to state replica [ns_server:debug,2014-08-19T16:49:49.977,ns_1@10.242.238.90:<0.20149.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1001_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:49.978,ns_1@10.242.238.90:<0.20149.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1001]}, {checkpoints,[{1001,0}]}, {name,<<"replication_building_1001_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1001]}, {takeover,false}, {suffix,"building_1001_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1001,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:49.979,ns_1@10.242.238.90:<0.20149.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20150.0> [rebalance:debug,2014-08-19T16:49:49.979,ns_1@10.242.238.90:<0.20149.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:49.980,ns_1@10.242.238.90:<0.20149.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29965.0>,#Ref<16550.0.1.52364>}]} [rebalance:info,2014-08-19T16:49:49.980,ns_1@10.242.238.90:<0.20149.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1001 [rebalance:debug,2014-08-19T16:49:49.981,ns_1@10.242.238.90:<0.20149.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29965.0>,#Ref<16550.0.1.52364>}] [ns_server:debug,2014-08-19T16:49:49.982,ns_1@10.242.238.90:<0.20149.0>:ebucketmigrator_srv:process_upstream:1031]seen 
backfill-close message [rebalance:debug,2014-08-19T16:49:49.998,ns_1@10.242.238.90:<0.20165.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1001 [ns_server:info,2014-08-19T16:49:50.004,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 746 state to replica [ns_server:info,2014-08-19T16:49:50.010,ns_1@10.242.238.90:<0.20168.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 746 to state replica [ns_server:debug,2014-08-19T16:49:50.058,ns_1@10.242.238.90:<0.20168.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_746_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:50.059,ns_1@10.242.238.90:<0.20168.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[746]}, {checkpoints,[{746,0}]}, {name,<<"replication_building_746_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[746]}, {takeover,false}, {suffix,"building_746_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",746,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:50.060,ns_1@10.242.238.90:<0.20168.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20169.0> [rebalance:debug,2014-08-19T16:49:50.060,ns_1@10.242.238.90:<0.20168.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:50.061,ns_1@10.242.238.90:<0.20168.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29985.0>,#Ref<16550.0.1.52490>}]} [rebalance:info,2014-08-19T16:49:50.061,ns_1@10.242.238.90:<0.20168.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 746 [rebalance:debug,2014-08-19T16:49:50.061,ns_1@10.242.238.90:<0.20168.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29985.0>,#Ref<16550.0.1.52490>}] [ns_server:debug,2014-08-19T16:49:50.062,ns_1@10.242.238.90:<0.20168.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:49:50.062,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20170.0> (ok) [rebalance:debug,2014-08-19T16:49:50.063,ns_1@10.242.238.90:<0.20171.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 746 [ns_server:debug,2014-08-19T16:49:50.075,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1001. Nacking mccouch update. [views:debug,2014-08-19T16:49:50.075,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1001. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:50.075,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1001,replica,0} [ns_server:debug,2014-08-19T16:49:50.075,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,756,1019,1003,759,1022,1006,762,1009,765,749,1012,752, 1015,755,1018,1002,758,1021,1005,761,1008,764,748,1011,767,751,1014,754,1017, 1001,757,1020,1004,760,1023,1007,763,747,1010] [ns_server:info,2014-08-19T16:49:50.145,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1000 state to replica [ns_server:info,2014-08-19T16:49:50.149,ns_1@10.242.238.90:<0.20174.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 1000 to state replica [views:debug,2014-08-19T16:49:50.159,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1001. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:50.159,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1001,replica,0} [ns_server:debug,2014-08-19T16:49:50.185,ns_1@10.242.238.90:<0.20174.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_1000_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:50.187,ns_1@10.242.238.90:<0.20174.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1000]}, {checkpoints,[{1000,0}]}, {name,<<"replication_building_1000_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[1000]}, {takeover,false}, {suffix,"building_1000_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",1000,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:50.188,ns_1@10.242.238.90:<0.20174.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20175.0> [rebalance:debug,2014-08-19T16:49:50.188,ns_1@10.242.238.90:<0.20174.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:50.188,ns_1@10.242.238.90:<0.20174.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30042.0>,#Ref<16550.0.1.52791>}]} [rebalance:info,2014-08-19T16:49:50.189,ns_1@10.242.238.90:<0.20174.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1000 [rebalance:debug,2014-08-19T16:49:50.189,ns_1@10.242.238.90:<0.20174.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30042.0>,#Ref<16550.0.1.52791>}] [ns_server:debug,2014-08-19T16:49:50.190,ns_1@10.242.238.90:<0.20174.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:50.206,ns_1@10.242.238.90:<0.20176.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1000 [ns_server:info,2014-08-19T16:49:50.212,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 745 state to replica [ns_server:info,2014-08-19T16:49:50.219,ns_1@10.242.238.90:<0.20193.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 745 to state replica [ns_server:debug,2014-08-19T16:49:50.269,ns_1@10.242.238.90:<0.20193.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: 
replication_building_745_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:50.271,ns_1@10.242.238.90:<0.20193.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[745]}, {checkpoints,[{745,0}]}, {name,<<"replication_building_745_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[745]}, {takeover,false}, {suffix,"building_745_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",745,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:50.271,ns_1@10.242.238.90:<0.20193.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20194.0> [rebalance:debug,2014-08-19T16:49:50.271,ns_1@10.242.238.90:<0.20193.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:50.272,ns_1@10.242.238.90:<0.20193.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30062.0>,#Ref<16550.0.1.52907>}]} [rebalance:info,2014-08-19T16:49:50.272,ns_1@10.242.238.90:<0.20193.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 745 [rebalance:debug,2014-08-19T16:49:50.273,ns_1@10.242.238.90:<0.20193.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30062.0>,#Ref<16550.0.1.52907>}] [ns_server:debug,2014-08-19T16:49:50.273,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20195.0> (ok) [ns_server:debug,2014-08-19T16:49:50.273,ns_1@10.242.238.90:<0.20193.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:50.275,ns_1@10.242.238.90:<0.20196.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 745 [ns_server:debug,2014-08-19T16:49:50.285,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1000. Nacking mccouch update. [views:debug,2014-08-19T16:49:50.285,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1000. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:50.285,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1000,replica,0} [ns_server:debug,2014-08-19T16:49:50.286,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,1000,756,1019,1003,759,1022,1006,762,1009,765,749,1012, 752,1015,755,1018,1002,758,1021,1005,761,1008,764,748,1011,767,751,1014,754, 1017,1001,757,1020,1004,760,1023,1007,763,747,1010] [ns_server:info,2014-08-19T16:49:50.348,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 999 state to replica [ns_server:info,2014-08-19T16:49:50.352,ns_1@10.242.238.90:<0.20199.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 999 to state replica [views:debug,2014-08-19T16:49:50.361,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1000. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:50.361,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1000,replica,0} [ns_server:debug,2014-08-19T16:49:50.388,ns_1@10.242.238.90:<0.20199.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_999_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:50.389,ns_1@10.242.238.90:<0.20199.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[999]}, {checkpoints,[{999,0}]}, {name,<<"replication_building_999_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[999]}, {takeover,false}, {suffix,"building_999_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",999,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:50.390,ns_1@10.242.238.90:<0.20199.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20200.0> [rebalance:debug,2014-08-19T16:49:50.390,ns_1@10.242.238.90:<0.20199.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:50.391,ns_1@10.242.238.90:<0.20199.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30120.0>,#Ref<16550.0.1.53195>}]} [rebalance:info,2014-08-19T16:49:50.391,ns_1@10.242.238.90:<0.20199.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 999 [rebalance:debug,2014-08-19T16:49:50.391,ns_1@10.242.238.90:<0.20199.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30120.0>,#Ref<16550.0.1.53195>}] [ns_server:debug,2014-08-19T16:49:50.392,ns_1@10.242.238.90:<0.20199.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:50.412,ns_1@10.242.238.90:<0.20215.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 999 [ns_server:info,2014-08-19T16:49:50.418,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 744 state to replica [ns_server:info,2014-08-19T16:49:50.424,ns_1@10.242.238.90:<0.20218.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 744 to state replica [ns_server:debug,2014-08-19T16:49:50.443,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 746. Nacking mccouch update. [views:debug,2014-08-19T16:49:50.443,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/746. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:50.443,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",746,pending,0} [ns_server:debug,2014-08-19T16:49:50.444,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,1000,756,1019,1003,759,1022,1006,762,746,1009,765,749, 1012,752,1015,755,1018,1002,758,1021,1005,761,1008,764,748,1011,767,751,1014, 754,1017,1001,757,1020,1004,760,1023,1007,763,747,1010] [ns_server:debug,2014-08-19T16:49:50.471,ns_1@10.242.238.90:<0.20218.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_744_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:50.473,ns_1@10.242.238.90:<0.20218.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[744]}, {checkpoints,[{744,0}]}, {name,<<"replication_building_744_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[744]}, {takeover,false}, {suffix,"building_744_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",744,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:50.474,ns_1@10.242.238.90:<0.20218.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20219.0> [rebalance:debug,2014-08-19T16:49:50.474,ns_1@10.242.238.90:<0.20218.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:50.474,ns_1@10.242.238.90:<0.20218.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30140.0>,#Ref<16550.0.1.53336>}]} [rebalance:info,2014-08-19T16:49:50.475,ns_1@10.242.238.90:<0.20218.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 744 [rebalance:debug,2014-08-19T16:49:50.475,ns_1@10.242.238.90:<0.20218.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30140.0>,#Ref<16550.0.1.53336>}] [ns_server:debug,2014-08-19T16:49:50.475,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20220.0> (ok) [ns_server:debug,2014-08-19T16:49:50.476,ns_1@10.242.238.90:<0.20218.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:49:50.476,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/746. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:50.476,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",746,pending,0} [rebalance:debug,2014-08-19T16:49:50.477,ns_1@10.242.238.90:<0.20221.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 744 [rebalance:debug,2014-08-19T16:49:50.477,ns_1@10.242.238.90:<0.20171.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:50.477,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20171.0> (ok) [ns_server:info,2014-08-19T16:49:50.549,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 998 state to replica [ns_server:info,2014-08-19T16:49:50.553,ns_1@10.242.238.90:<0.20238.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 998 to state replica [ns_server:debug,2014-08-19T16:49:50.577,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 744. Nacking mccouch update. [views:debug,2014-08-19T16:49:50.577,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/744. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:50.577,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",744,pending,0} [ns_server:debug,2014-08-19T16:49:50.577,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,1000,756,1019,1003,759,1022,1006,762,746,1009,765,749, 1012,752,1015,755,1018,1002,758,1021,1005,761,1008,764,748,1011,767,751,1014, 754,1017,1001,757,1020,1004,760,744,1023,1007,763,747,1010] [ns_server:debug,2014-08-19T16:49:50.586,ns_1@10.242.238.90:<0.20238.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_998_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:50.588,ns_1@10.242.238.90:<0.20238.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[998]}, {checkpoints,[{998,0}]}, {name,<<"replication_building_998_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[998]}, {takeover,false}, {suffix,"building_998_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",998,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:50.588,ns_1@10.242.238.90:<0.20238.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20239.0> [rebalance:debug,2014-08-19T16:49:50.589,ns_1@10.242.238.90:<0.20238.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:50.589,ns_1@10.242.238.90:<0.20238.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30197.0>,#Ref<16550.0.1.53601>}]} [rebalance:info,2014-08-19T16:49:50.589,ns_1@10.242.238.90:<0.20238.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 998 [rebalance:debug,2014-08-19T16:49:50.590,ns_1@10.242.238.90:<0.20238.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30197.0>,#Ref<16550.0.1.53601>}] [ns_server:debug,2014-08-19T16:49:50.591,ns_1@10.242.238.90:<0.20238.0>:ebucketmigrator_srv:process_upstream:1031]seen 
backfill-close message [rebalance:debug,2014-08-19T16:49:50.608,ns_1@10.242.238.90:<0.20240.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 998 [views:debug,2014-08-19T16:49:50.611,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/744. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:50.611,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",744,pending,0} [ns_server:info,2014-08-19T16:49:50.614,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 743 state to replica [ns_server:info,2014-08-19T16:49:50.620,ns_1@10.242.238.90:<0.20243.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 743 to state replica [ns_server:debug,2014-08-19T16:49:50.669,ns_1@10.242.238.90:<0.20243.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_743_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:50.671,ns_1@10.242.238.90:<0.20243.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[743]}, {checkpoints,[{743,0}]}, {name,<<"replication_building_743_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[743]}, {takeover,false}, {suffix,"building_743_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",743,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:50.671,ns_1@10.242.238.90:<0.20243.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20258.0> [rebalance:debug,2014-08-19T16:49:50.672,ns_1@10.242.238.90:<0.20243.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:50.672,ns_1@10.242.238.90:<0.20243.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30217.0>,#Ref<16550.0.1.53695>}]} [rebalance:info,2014-08-19T16:49:50.672,ns_1@10.242.238.90:<0.20243.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 743 [rebalance:debug,2014-08-19T16:49:50.673,ns_1@10.242.238.90:<0.20243.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30217.0>,#Ref<16550.0.1.53695>}] [ns_server:debug,2014-08-19T16:49:50.673,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20259.0> (ok) [ns_server:debug,2014-08-19T16:49:50.673,ns_1@10.242.238.90:<0.20243.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:50.675,ns_1@10.242.238.90:<0.20260.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 743 [ns_server:debug,2014-08-19T16:49:50.711,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 999. Nacking mccouch update. [views:debug,2014-08-19T16:49:50.711,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/999. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:50.711,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",999,replica,0} [ns_server:debug,2014-08-19T16:49:50.712,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,1000,756,1019,1003,759,1022,1006,762,746,1009,765,749, 1012,999,752,1015,755,1018,1002,758,1021,1005,761,1008,764,748,1011,767,751, 1014,754,1017,1001,757,1020,1004,760,744,1023,1007,763,747,1010] [ns_server:info,2014-08-19T16:49:50.748,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 997 state to replica [ns_server:info,2014-08-19T16:49:50.752,ns_1@10.242.238.90:<0.20263.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 997 to state replica [views:debug,2014-08-19T16:49:50.762,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/999. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:50.762,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",999,replica,0} [ns_server:debug,2014-08-19T16:49:50.791,ns_1@10.242.238.90:<0.20263.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_997_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:50.793,ns_1@10.242.238.90:<0.20263.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[997]}, {checkpoints,[{997,0}]}, {name,<<"replication_building_997_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[997]}, {takeover,false}, {suffix,"building_997_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",997,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:50.794,ns_1@10.242.238.90:<0.20263.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20264.0> [rebalance:debug,2014-08-19T16:49:50.794,ns_1@10.242.238.90:<0.20263.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:50.794,ns_1@10.242.238.90:<0.20263.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30274.0>,#Ref<16550.0.1.53960>}]} [rebalance:info,2014-08-19T16:49:50.794,ns_1@10.242.238.90:<0.20263.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 997 [rebalance:debug,2014-08-19T16:49:50.795,ns_1@10.242.238.90:<0.20263.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30274.0>,#Ref<16550.0.1.53960>}] [ns_server:debug,2014-08-19T16:49:50.796,ns_1@10.242.238.90:<0.20263.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:50.812,ns_1@10.242.238.90:<0.20270.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 997 [ns_server:info,2014-08-19T16:49:50.818,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 742 state to replica [ns_server:info,2014-08-19T16:49:50.824,ns_1@10.242.238.90:<0.20282.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 742 to state replica [ns_server:debug,2014-08-19T16:49:50.845,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document 
into vb: 745. Nacking mccouch update. [views:debug,2014-08-19T16:49:50.845,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/745. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:50.846,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",745,pending,0} [ns_server:debug,2014-08-19T16:49:50.846,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,1000,756,1019,1003,759,1022,1006,762,746,1009,765,749, 1012,999,752,1015,755,1018,1002,758,1021,1005,761,745,1008,764,748,1011,767, 751,1014,754,1017,1001,757,1020,1004,760,744,1023,1007,763,747,1010] [ns_server:debug,2014-08-19T16:49:50.876,ns_1@10.242.238.90:<0.20282.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_742_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:50.877,ns_1@10.242.238.90:<0.20282.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[742]}, {checkpoints,[{742,0}]}, {name,<<"replication_building_742_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[742]}, {takeover,false}, {suffix,"building_742_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",742,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:50.878,ns_1@10.242.238.90:<0.20282.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20283.0> [rebalance:debug,2014-08-19T16:49:50.878,ns_1@10.242.238.90:<0.20282.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:50.878,ns_1@10.242.238.90:<0.20282.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30294.0>,#Ref<16550.0.1.54075>}]} [rebalance:info,2014-08-19T16:49:50.879,ns_1@10.242.238.90:<0.20282.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 742 [rebalance:debug,2014-08-19T16:49:50.879,ns_1@10.242.238.90:<0.20282.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30294.0>,#Ref<16550.0.1.54075>}] [views:debug,2014-08-19T16:49:50.879,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/745. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:50.879,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",745,pending,0} [ns_server:debug,2014-08-19T16:49:50.879,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20284.0> (ok) [ns_server:debug,2014-08-19T16:49:50.879,ns_1@10.242.238.90:<0.20282.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:50.881,ns_1@10.242.238.90:<0.20285.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 742 [ns_server:info,2014-08-19T16:49:50.954,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 996 state to replica [ns_server:info,2014-08-19T16:49:50.957,ns_1@10.242.238.90:<0.20288.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 996 to state replica [ns_server:debug,2014-08-19T16:49:50.995,ns_1@10.242.238.90:<0.20288.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_996_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:50.996,ns_1@10.242.238.90:<0.20288.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[996]}, {checkpoints,[{996,0}]}, {name,<<"replication_building_996_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[996]}, {takeover,false}, {suffix,"building_996_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",996,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:50.997,ns_1@10.242.238.90:<0.20288.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20303.0> [rebalance:debug,2014-08-19T16:49:50.997,ns_1@10.242.238.90:<0.20288.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:50.998,ns_1@10.242.238.90:<0.20288.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30351.0>,#Ref<16550.0.1.54340>}]} [rebalance:info,2014-08-19T16:49:50.998,ns_1@10.242.238.90:<0.20288.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 996 [rebalance:debug,2014-08-19T16:49:50.998,ns_1@10.242.238.90:<0.20288.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30351.0>,#Ref<16550.0.1.54340>}] [ns_server:debug,2014-08-19T16:49:50.999,ns_1@10.242.238.90:<0.20288.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:49:51.003,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 998. Nacking mccouch update. [views:debug,2014-08-19T16:49:51.003,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/998. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:51.003,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",998,replica,0} [ns_server:debug,2014-08-19T16:49:51.003,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,1000,756,1019,1003,759,1022,1006,762,746,1009,765,749, 1012,999,752,1015,755,1018,1002,758,1021,1005,761,745,1008,764,748,1011,998, 767,751,1014,754,1017,1001,757,1020,1004,760,744,1023,1007,763,747,1010] [rebalance:debug,2014-08-19T16:49:51.019,ns_1@10.242.238.90:<0.20304.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 996 [ns_server:info,2014-08-19T16:49:51.025,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 741 state to replica [ns_server:info,2014-08-19T16:49:51.031,ns_1@10.242.238.90:<0.20307.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 741 to state replica [views:debug,2014-08-19T16:49:51.053,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/998. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:51.053,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",998,replica,0} [ns_server:debug,2014-08-19T16:49:51.079,ns_1@10.242.238.90:<0.20307.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_741_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:51.081,ns_1@10.242.238.90:<0.20307.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[741]}, {checkpoints,[{741,0}]}, {name,<<"replication_building_741_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[741]}, {takeover,false}, {suffix,"building_741_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",741,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:51.082,ns_1@10.242.238.90:<0.20307.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20308.0> [rebalance:debug,2014-08-19T16:49:51.082,ns_1@10.242.238.90:<0.20307.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:51.082,ns_1@10.242.238.90:<0.20307.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30371.0>,#Ref<16550.0.1.54477>}]} [rebalance:info,2014-08-19T16:49:51.082,ns_1@10.242.238.90:<0.20307.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 741 [rebalance:debug,2014-08-19T16:49:51.083,ns_1@10.242.238.90:<0.20307.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30371.0>,#Ref<16550.0.1.54477>}] [ns_server:debug,2014-08-19T16:49:51.083,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20309.0> (ok) [ns_server:debug,2014-08-19T16:49:51.083,ns_1@10.242.238.90:<0.20307.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:51.085,ns_1@10.242.238.90:<0.20310.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 741 
[ns_server:info,2014-08-19T16:49:51.156,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 995 state to replica [ns_server:info,2014-08-19T16:49:51.160,ns_1@10.242.238.90:<0.20327.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 995 to state replica [ns_server:debug,2014-08-19T16:49:51.178,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 742. Nacking mccouch update. [views:debug,2014-08-19T16:49:51.179,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/742. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:51.179,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",742,pending,0} [ns_server:debug,2014-08-19T16:49:51.179,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,1013,753,1016,1000,756,1019,1003,759,1022,1006,762,746,1009,765,749, 1012,999,752,1015,755,1018,1002,758,742,1021,1005,761,745,1008,764,748,1011, 998,767,751,1014,754,1017,1001,757,1020,1004,760,744,1023,1007,763,747,1010] [ns_server:debug,2014-08-19T16:49:51.195,ns_1@10.242.238.90:<0.20327.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_995_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:51.196,ns_1@10.242.238.90:<0.20327.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[995]}, {checkpoints,[{995,0}]}, {name,<<"replication_building_995_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[995]}, {takeover,false}, {suffix,"building_995_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",995,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:51.197,ns_1@10.242.238.90:<0.20327.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20328.0> [rebalance:debug,2014-08-19T16:49:51.197,ns_1@10.242.238.90:<0.20327.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:51.198,ns_1@10.242.238.90:<0.20327.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30437.0>,#Ref<16550.0.1.54794>}]} [rebalance:info,2014-08-19T16:49:51.198,ns_1@10.242.238.90:<0.20327.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 995 [rebalance:debug,2014-08-19T16:49:51.198,ns_1@10.242.238.90:<0.20327.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30437.0>,#Ref<16550.0.1.54794>}] [ns_server:debug,2014-08-19T16:49:51.199,ns_1@10.242.238.90:<0.20327.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:51.219,ns_1@10.242.238.90:<0.20329.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 995 [ns_server:info,2014-08-19T16:49:51.225,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 740 state to replica [ns_server:info,2014-08-19T16:49:51.230,ns_1@10.242.238.90:<0.20332.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 740 to state replica [views:debug,2014-08-19T16:49:51.246,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for 
default/742. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:51.246,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",742,pending,0} [rebalance:debug,2014-08-19T16:49:51.247,ns_1@10.242.238.90:<0.20215.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:51.247,ns_1@10.242.238.90:<0.20176.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:51.247,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20215.0> (ok) [ns_server:debug,2014-08-19T16:49:51.247,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20176.0> (ok) [ns_server:debug,2014-08-19T16:49:51.279,ns_1@10.242.238.90:<0.20332.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_740_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:51.281,ns_1@10.242.238.90:<0.20332.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[740]}, {checkpoints,[{740,0}]}, {name,<<"replication_building_740_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[740]}, {takeover,false}, {suffix,"building_740_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",740,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:51.281,ns_1@10.242.238.90:<0.20332.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20333.0> [rebalance:debug,2014-08-19T16:49:51.282,ns_1@10.242.238.90:<0.20332.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:51.282,ns_1@10.242.238.90:<0.20332.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30457.0>,#Ref<16550.0.1.54910>}]} [rebalance:info,2014-08-19T16:49:51.282,ns_1@10.242.238.90:<0.20332.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 740 [rebalance:debug,2014-08-19T16:49:51.283,ns_1@10.242.238.90:<0.20332.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30457.0>,#Ref<16550.0.1.54910>}] [ns_server:debug,2014-08-19T16:49:51.283,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20334.0> (ok) [ns_server:debug,2014-08-19T16:49:51.283,ns_1@10.242.238.90:<0.20332.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:51.285,ns_1@10.242.238.90:<0.20335.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 740 [ns_server:info,2014-08-19T16:49:51.359,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 994 state to replica [ns_server:info,2014-08-19T16:49:51.363,ns_1@10.242.238.90:<0.20338.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 994 to state replica [ns_server:debug,2014-08-19T16:49:51.397,ns_1@10.242.238.90:<0.20338.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_994_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:51.398,ns_1@10.242.238.90:<0.20338.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[994]}, {checkpoints,[{994,0}]}, {name,<<"replication_building_994_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, 
{"10.242.238.90",11209}, [{vbuckets,[994]}, {takeover,false}, {suffix,"building_994_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",994,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:51.399,ns_1@10.242.238.90:<0.20338.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20359.0> [rebalance:debug,2014-08-19T16:49:51.399,ns_1@10.242.238.90:<0.20338.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:51.399,ns_1@10.242.238.90:<0.20338.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30524.0>,#Ref<16550.0.1.55226>}]} [rebalance:info,2014-08-19T16:49:51.400,ns_1@10.242.238.90:<0.20338.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 994 [rebalance:debug,2014-08-19T16:49:51.400,ns_1@10.242.238.90:<0.20338.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30524.0>,#Ref<16550.0.1.55226>}] [ns_server:debug,2014-08-19T16:49:51.400,ns_1@10.242.238.90:<0.20338.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:51.420,ns_1@10.242.238.90:<0.20360.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 994 [ns_server:info,2014-08-19T16:49:51.426,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 739 state to replica [ns_server:info,2014-08-19T16:49:51.433,ns_1@10.242.238.90:<0.20364.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 739 to state replica [ns_server:debug,2014-08-19T16:49:51.446,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 997. Nacking mccouch update. [views:debug,2014-08-19T16:49:51.446,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/997. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:51.446,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",997,replica,0} [ns_server:debug,2014-08-19T16:49:51.446,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,756,1019,1003,759,1022,1006,762,746,1009,765, 749,1012,999,752,1015,755,1018,1002,758,742,1021,1005,761,745,1008,764,748, 1011,998,767,751,1014,754,1017,1001,757,1020,1004,760,744,1023,1007,763,747, 1010] [ns_server:debug,2014-08-19T16:49:51.484,ns_1@10.242.238.90:<0.20364.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_739_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:51.485,ns_1@10.242.238.90:<0.20364.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[739]}, {checkpoints,[{739,0}]}, {name,<<"replication_building_739_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[739]}, {takeover,false}, {suffix,"building_739_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",739,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:51.486,ns_1@10.242.238.90:<0.20364.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20365.0> [rebalance:debug,2014-08-19T16:49:51.486,ns_1@10.242.238.90:<0.20364.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:51.487,ns_1@10.242.238.90:<0.20364.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30544.0>,#Ref<16550.0.1.55343>}]} [rebalance:info,2014-08-19T16:49:51.487,ns_1@10.242.238.90:<0.20364.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 739 [rebalance:debug,2014-08-19T16:49:51.487,ns_1@10.242.238.90:<0.20364.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30544.0>,#Ref<16550.0.1.55343>}] [ns_server:debug,2014-08-19T16:49:51.488,ns_1@10.242.238.90:<0.20364.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:49:51.488,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20366.0> (ok) [rebalance:debug,2014-08-19T16:49:51.489,ns_1@10.242.238.90:<0.20367.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 739 [views:debug,2014-08-19T16:49:51.522,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/997. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:51.522,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",997,replica,0} [ns_server:info,2014-08-19T16:49:51.563,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 993 state to replica [ns_server:info,2014-08-19T16:49:51.567,ns_1@10.242.238.90:<0.20370.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 993 to state replica [ns_server:debug,2014-08-19T16:49:51.602,ns_1@10.242.238.90:<0.20370.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_993_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:51.603,ns_1@10.242.238.90:<0.20370.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[993]}, {checkpoints,[{993,0}]}, {name,<<"replication_building_993_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[993]}, {takeover,false}, {suffix,"building_993_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",993,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:51.604,ns_1@10.242.238.90:<0.20370.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20385.0> [rebalance:debug,2014-08-19T16:49:51.604,ns_1@10.242.238.90:<0.20370.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:51.605,ns_1@10.242.238.90:<0.20370.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30602.0>,#Ref<16550.0.1.55641>}]} [rebalance:info,2014-08-19T16:49:51.605,ns_1@10.242.238.90:<0.20370.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 993 [rebalance:debug,2014-08-19T16:49:51.605,ns_1@10.242.238.90:<0.20370.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30602.0>,#Ref<16550.0.1.55641>}] [ns_server:debug,2014-08-19T16:49:51.606,ns_1@10.242.238.90:<0.20370.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:51.624,ns_1@10.242.238.90:<0.20386.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 993 [ns_server:info,2014-08-19T16:49:51.629,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 738 state to replica [ns_server:info,2014-08-19T16:49:51.636,ns_1@10.242.238.90:<0.20389.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 738 to state replica [ns_server:debug,2014-08-19T16:49:51.647,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 995. Nacking mccouch update. [views:debug,2014-08-19T16:49:51.647,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/995. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:51.647,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",995,replica,0} [ns_server:debug,2014-08-19T16:49:51.647,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,756,1019,1003,759,1022,1006,762,746,1009,765, 749,1012,999,752,1015,755,1018,1002,758,742,1021,1005,761,745,1008,995,764, 748,1011,998,767,751,1014,754,1017,1001,757,1020,1004,760,744,1023,1007,763, 747,1010] [ns_server:debug,2014-08-19T16:49:51.685,ns_1@10.242.238.90:<0.20389.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_738_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:51.686,ns_1@10.242.238.90:<0.20389.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[738]}, {checkpoints,[{738,0}]}, {name,<<"replication_building_738_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[738]}, {takeover,false}, {suffix,"building_738_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",738,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:51.687,ns_1@10.242.238.90:<0.20389.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20390.0> [rebalance:debug,2014-08-19T16:49:51.687,ns_1@10.242.238.90:<0.20389.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:51.688,ns_1@10.242.238.90:<0.20389.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30636.0>,#Ref<16550.0.1.55789>}]} [rebalance:info,2014-08-19T16:49:51.688,ns_1@10.242.238.90:<0.20389.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 738 [rebalance:debug,2014-08-19T16:49:51.688,ns_1@10.242.238.90:<0.20389.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30636.0>,#Ref<16550.0.1.55789>}] [ns_server:debug,2014-08-19T16:49:51.689,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20391.0> (ok) [ns_server:debug,2014-08-19T16:49:51.689,ns_1@10.242.238.90:<0.20389.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:51.691,ns_1@10.242.238.90:<0.20392.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 738 [views:debug,2014-08-19T16:49:51.714,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/995. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:51.714,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",995,replica,0} [ns_server:info,2014-08-19T16:49:51.764,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 992 state to replica [ns_server:info,2014-08-19T16:49:51.768,ns_1@10.242.238.90:<0.20395.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 992 to state replica [ns_server:debug,2014-08-19T16:49:51.803,ns_1@10.242.238.90:<0.20395.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_992_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:51.804,ns_1@10.242.238.90:<0.20395.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[992]}, {checkpoints,[{992,0}]}, {name,<<"replication_building_992_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[992]}, {takeover,false}, {suffix,"building_992_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",992,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:51.805,ns_1@10.242.238.90:<0.20395.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20410.0> [rebalance:debug,2014-08-19T16:49:51.805,ns_1@10.242.238.90:<0.20395.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:51.806,ns_1@10.242.238.90:<0.20395.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30679.0>,#Ref<16550.0.1.56024>}]} [rebalance:info,2014-08-19T16:49:51.806,ns_1@10.242.238.90:<0.20395.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 992 [rebalance:debug,2014-08-19T16:49:51.806,ns_1@10.242.238.90:<0.20395.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30679.0>,#Ref<16550.0.1.56024>}] [ns_server:debug,2014-08-19T16:49:51.807,ns_1@10.242.238.90:<0.20395.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:49:51.813,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 743. Nacking mccouch update. [views:debug,2014-08-19T16:49:51.813,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/743. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:51.813,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",743,pending,0} [ns_server:debug,2014-08-19T16:49:51.813,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,756,1019,1003,759,743,1022,1006,762,746,1009, 765,749,1012,999,752,1015,755,1018,1002,758,742,1021,1005,761,745,1008,995, 764,748,1011,998,767,751,1014,754,1017,1001,757,1020,1004,760,744,1023,1007, 763,747,1010] [rebalance:debug,2014-08-19T16:49:51.825,ns_1@10.242.238.90:<0.20411.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 992 [ns_server:info,2014-08-19T16:49:51.830,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 737 state to replica [ns_server:info,2014-08-19T16:49:51.837,ns_1@10.242.238.90:<0.20414.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 737 to state replica [views:debug,2014-08-19T16:49:51.847,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/743. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:51.847,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",743,pending,0} [ns_server:debug,2014-08-19T16:49:51.884,ns_1@10.242.238.90:<0.20414.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_737_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:51.886,ns_1@10.242.238.90:<0.20414.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[737]}, {checkpoints,[{737,0}]}, {name,<<"replication_building_737_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[737]}, {takeover,false}, {suffix,"building_737_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",737,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:51.887,ns_1@10.242.238.90:<0.20414.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20415.0> [rebalance:debug,2014-08-19T16:49:51.887,ns_1@10.242.238.90:<0.20414.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:51.887,ns_1@10.242.238.90:<0.20414.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30713.0>,#Ref<16550.0.1.56167>}]} [rebalance:info,2014-08-19T16:49:51.887,ns_1@10.242.238.90:<0.20414.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 737 [rebalance:debug,2014-08-19T16:49:51.888,ns_1@10.242.238.90:<0.20414.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30713.0>,#Ref<16550.0.1.56167>}] [ns_server:debug,2014-08-19T16:49:51.888,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20416.0> (ok) [ns_server:debug,2014-08-19T16:49:51.888,ns_1@10.242.238.90:<0.20414.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:51.890,ns_1@10.242.238.90:<0.20417.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 737 
[ns_server:debug,2014-08-19T16:49:51.939,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 741. Nacking mccouch update. [views:debug,2014-08-19T16:49:51.939,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/741. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:51.939,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",741,pending,0} [ns_server:debug,2014-08-19T16:49:51.939,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,756,1019,1003,759,743,1022,1006,762,746,1009, 765,749,1012,999,752,1015,755,1018,1002,758,742,1021,1005,761,745,1008,995, 764,748,1011,998,767,751,1014,754,1017,1001,757,741,1020,1004,760,744,1023, 1007,763,747,1010] [ns_server:info,2014-08-19T16:49:51.969,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 991 state to replica [views:debug,2014-08-19T16:49:51.973,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/741. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:51.973,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",741,pending,0} [ns_server:info,2014-08-19T16:49:51.973,ns_1@10.242.238.90:<0.20434.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 991 to state replica [ns_server:debug,2014-08-19T16:49:52.007,ns_1@10.242.238.90:<0.20434.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_991_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:52.009,ns_1@10.242.238.90:<0.20434.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[991]}, {checkpoints,[{991,0}]}, {name,<<"replication_building_991_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[991]}, {takeover,false}, {suffix,"building_991_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",991,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:52.010,ns_1@10.242.238.90:<0.20434.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20435.0> [rebalance:debug,2014-08-19T16:49:52.010,ns_1@10.242.238.90:<0.20434.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:52.010,ns_1@10.242.238.90:<0.20434.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30756.0>,#Ref<16550.0.1.56405>}]} [rebalance:info,2014-08-19T16:49:52.010,ns_1@10.242.238.90:<0.20434.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 991 [rebalance:debug,2014-08-19T16:49:52.011,ns_1@10.242.238.90:<0.20434.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30756.0>,#Ref<16550.0.1.56405>}] [ns_server:debug,2014-08-19T16:49:52.012,ns_1@10.242.238.90:<0.20434.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:52.031,ns_1@10.242.238.90:<0.20436.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 991 
[ns_server:info,2014-08-19T16:49:52.037,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 736 state to replica [ns_server:info,2014-08-19T16:49:52.045,ns_1@10.242.238.90:<0.20453.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 736 to state replica [ns_server:debug,2014-08-19T16:49:52.073,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 996. Nacking mccouch update. [views:debug,2014-08-19T16:49:52.073,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/996. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:52.074,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",996,replica,0} [ns_server:debug,2014-08-19T16:49:52.074,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,756,1019,1003,759,743,1022,1006,762,746,1009, 996,765,749,1012,999,752,1015,755,1018,1002,758,742,1021,1005,761,745,1008, 995,764,748,1011,998,767,751,1014,754,1017,1001,757,741,1020,1004,760,744, 1023,1007,763,747,1010] [ns_server:debug,2014-08-19T16:49:52.094,ns_1@10.242.238.90:<0.20453.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_736_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:52.096,ns_1@10.242.238.90:<0.20453.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[736]}, {checkpoints,[{736,0}]}, {name,<<"replication_building_736_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[736]}, {takeover,false}, {suffix,"building_736_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",736,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:52.096,ns_1@10.242.238.90:<0.20453.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20454.0> [rebalance:debug,2014-08-19T16:49:52.096,ns_1@10.242.238.90:<0.20453.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:52.097,ns_1@10.242.238.90:<0.20453.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30790.0>,#Ref<16550.0.1.56547>}]} [rebalance:info,2014-08-19T16:49:52.097,ns_1@10.242.238.90:<0.20453.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 736 [rebalance:debug,2014-08-19T16:49:52.097,ns_1@10.242.238.90:<0.20453.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30790.0>,#Ref<16550.0.1.56547>}] [ns_server:debug,2014-08-19T16:49:52.098,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20455.0> (ok) [ns_server:debug,2014-08-19T16:49:52.098,ns_1@10.242.238.90:<0.20453.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:52.099,ns_1@10.242.238.90:<0.20456.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 736 [views:debug,2014-08-19T16:49:52.107,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/996. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:52.107,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",996,replica,0} [ns_server:info,2014-08-19T16:49:52.175,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 990 state to replica [ns_server:info,2014-08-19T16:49:52.179,ns_1@10.242.238.90:<0.20473.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 990 to state replica [ns_server:debug,2014-08-19T16:49:52.191,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 994. Nacking mccouch update. [views:debug,2014-08-19T16:49:52.191,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/994. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:52.192,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",994,replica,0} [ns_server:debug,2014-08-19T16:49:52.192,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,756,1019,1003,759,743,1022,1006,762,746,1009, 996,765,749,1012,999,752,1015,755,1018,1002,758,742,1021,1005,761,745,1008, 995,764,748,1011,998,767,751,1014,754,1017,1001,757,741,1020,1004,760,744, 1023,1007,994,763,747,1010] [ns_server:debug,2014-08-19T16:49:52.213,ns_1@10.242.238.90:<0.20473.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_990_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:52.214,ns_1@10.242.238.90:<0.20473.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[990]}, {checkpoints,[{990,0}]}, {name,<<"replication_building_990_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[990]}, {takeover,false}, {suffix,"building_990_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",990,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:52.215,ns_1@10.242.238.90:<0.20473.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20474.0> [rebalance:debug,2014-08-19T16:49:52.215,ns_1@10.242.238.90:<0.20473.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:52.216,ns_1@10.242.238.90:<0.20473.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30833.0>,#Ref<16550.0.1.56775>}]} [rebalance:info,2014-08-19T16:49:52.216,ns_1@10.242.238.90:<0.20473.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 990 [rebalance:debug,2014-08-19T16:49:52.216,ns_1@10.242.238.90:<0.20473.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30833.0>,#Ref<16550.0.1.56775>}] [ns_server:debug,2014-08-19T16:49:52.217,ns_1@10.242.238.90:<0.20473.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:49:52.225,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/994. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:52.225,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",994,replica,0} [rebalance:debug,2014-08-19T16:49:52.235,ns_1@10.242.238.90:<0.20475.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 990 [ns_server:info,2014-08-19T16:49:52.242,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 735 state to replica [ns_server:info,2014-08-19T16:49:52.248,ns_1@10.242.238.90:<0.20478.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 735 to state replica [ns_server:debug,2014-08-19T16:49:52.298,ns_1@10.242.238.90:<0.20478.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_735_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:52.299,ns_1@10.242.238.90:<0.20478.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[735]}, {checkpoints,[{735,0}]}, {name,<<"replication_building_735_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[735]}, {takeover,false}, {suffix,"building_735_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",735,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:52.300,ns_1@10.242.238.90:<0.20478.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20493.0> [rebalance:debug,2014-08-19T16:49:52.300,ns_1@10.242.238.90:<0.20478.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:52.300,ns_1@10.242.238.90:<0.20478.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30853.0>,#Ref<16550.0.1.56889>}]} [rebalance:info,2014-08-19T16:49:52.300,ns_1@10.242.238.90:<0.20478.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 735 [rebalance:debug,2014-08-19T16:49:52.301,ns_1@10.242.238.90:<0.20478.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30853.0>,#Ref<16550.0.1.56889>}] [ns_server:debug,2014-08-19T16:49:52.301,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20494.0> (ok) [ns_server:debug,2014-08-19T16:49:52.301,ns_1@10.242.238.90:<0.20478.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:52.303,ns_1@10.242.238.90:<0.20495.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 735 [ns_server:debug,2014-08-19T16:49:52.323,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 992. Nacking mccouch update. [views:debug,2014-08-19T16:49:52.323,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/992. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:52.323,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",992,replica,0} [ns_server:debug,2014-08-19T16:49:52.323,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,756,1019,1003,759,743,1022,1006,762,746,1009, 996,765,749,1012,999,752,1015,755,1018,1002,758,742,1021,1005,992,761,745, 1008,995,764,748,1011,998,767,751,1014,754,1017,1001,757,741,1020,1004,760, 744,1023,1007,994,763,747,1010] [ns_server:info,2014-08-19T16:49:52.375,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 989 state to replica [ns_server:info,2014-08-19T16:49:52.379,ns_1@10.242.238.90:<0.20498.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 989 to state replica [views:debug,2014-08-19T16:49:52.399,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/992. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:52.399,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",992,replica,0} [ns_server:debug,2014-08-19T16:49:52.412,ns_1@10.242.238.90:<0.20498.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_989_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:52.414,ns_1@10.242.238.90:<0.20498.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[989]}, {checkpoints,[{989,0}]}, {name,<<"replication_building_989_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[989]}, {takeover,false}, {suffix,"building_989_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",989,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:52.415,ns_1@10.242.238.90:<0.20498.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20499.0> [rebalance:debug,2014-08-19T16:49:52.415,ns_1@10.242.238.90:<0.20498.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:52.415,ns_1@10.242.238.90:<0.20498.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30910.0>,#Ref<16550.0.1.57170>}]} [rebalance:info,2014-08-19T16:49:52.415,ns_1@10.242.238.90:<0.20498.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 989 [rebalance:debug,2014-08-19T16:49:52.416,ns_1@10.242.238.90:<0.20498.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30910.0>,#Ref<16550.0.1.57170>}] [ns_server:debug,2014-08-19T16:49:52.417,ns_1@10.242.238.90:<0.20498.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:52.436,ns_1@10.242.238.90:<0.20500.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 989 [ns_server:info,2014-08-19T16:49:52.442,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 734 state to replica [ns_server:info,2014-08-19T16:49:52.448,ns_1@10.242.238.90:<0.20503.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 734 to state replica 
[ns_server:debug,2014-08-19T16:49:52.497,ns_1@10.242.238.90:<0.20503.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_734_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:52.498,ns_1@10.242.238.90:<0.20503.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[734]}, {checkpoints,[{734,0}]}, {name,<<"replication_building_734_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[734]}, {takeover,false}, {suffix,"building_734_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",734,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:52.499,ns_1@10.242.238.90:<0.20503.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20518.0> [rebalance:debug,2014-08-19T16:49:52.499,ns_1@10.242.238.90:<0.20503.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:52.500,ns_1@10.242.238.90:<0.20503.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30930.0>,#Ref<16550.0.1.57288>}]} [rebalance:info,2014-08-19T16:49:52.500,ns_1@10.242.238.90:<0.20503.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 734 [rebalance:debug,2014-08-19T16:49:52.500,ns_1@10.242.238.90:<0.20503.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30930.0>,#Ref<16550.0.1.57288>}] [ns_server:debug,2014-08-19T16:49:52.501,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20519.0> (ok) [ns_server:debug,2014-08-19T16:49:52.501,ns_1@10.242.238.90:<0.20503.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:52.503,ns_1@10.242.238.90:<0.20520.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 734 [ns_server:debug,2014-08-19T16:49:52.549,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 740. Nacking mccouch update. [views:debug,2014-08-19T16:49:52.549,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/740. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:52.549,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",740,pending,0} [ns_server:debug,2014-08-19T16:49:52.549,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,756,740,1019,1003,759,743,1022,1006,762,746, 1009,996,765,749,1012,999,752,1015,755,1018,1002,758,742,1021,1005,992,761, 745,1008,995,764,748,1011,998,767,751,1014,754,1017,1001,757,741,1020,1004, 760,744,1023,1007,994,763,747,1010] [ns_server:info,2014-08-19T16:49:52.578,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 988 state to replica [ns_server:info,2014-08-19T16:49:52.582,ns_1@10.242.238.90:<0.20523.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 988 to state replica [ns_server:debug,2014-08-19T16:49:52.616,ns_1@10.242.238.90:<0.20523.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_988_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:52.617,ns_1@10.242.238.90:<0.20523.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[988]}, {checkpoints,[{988,0}]}, {name,<<"replication_building_988_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[988]}, {takeover,false}, {suffix,"building_988_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",988,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:52.618,ns_1@10.242.238.90:<0.20523.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20524.0> [rebalance:debug,2014-08-19T16:49:52.618,ns_1@10.242.238.90:<0.20523.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:52.619,ns_1@10.242.238.90:<0.20523.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.30987.0>,#Ref<16550.0.1.58507>}]} [rebalance:info,2014-08-19T16:49:52.619,ns_1@10.242.238.90:<0.20523.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 988 [rebalance:debug,2014-08-19T16:49:52.619,ns_1@10.242.238.90:<0.20523.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.30987.0>,#Ref<16550.0.1.58507>}] [ns_server:debug,2014-08-19T16:49:52.621,ns_1@10.242.238.90:<0.20523.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:49:52.624,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/740. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:52.624,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",740,pending,0} [rebalance:debug,2014-08-19T16:49:52.639,ns_1@10.242.238.90:<0.20525.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 988 [ns_server:info,2014-08-19T16:49:52.646,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 733 state to replica [ns_server:info,2014-08-19T16:49:52.652,ns_1@10.242.238.90:<0.20528.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 733 to state replica [ns_server:debug,2014-08-19T16:49:52.701,ns_1@10.242.238.90:<0.20528.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_733_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:52.702,ns_1@10.242.238.90:<0.20528.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[733]}, {checkpoints,[{733,0}]}, {name,<<"replication_building_733_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[733]}, {takeover,false}, {suffix,"building_733_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",733,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:52.703,ns_1@10.242.238.90:<0.20528.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20543.0> [rebalance:debug,2014-08-19T16:49:52.703,ns_1@10.242.238.90:<0.20528.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:52.703,ns_1@10.242.238.90:<0.20528.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31007.0>,#Ref<16550.0.1.58645>}]} [rebalance:info,2014-08-19T16:49:52.704,ns_1@10.242.238.90:<0.20528.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 733 [rebalance:debug,2014-08-19T16:49:52.704,ns_1@10.242.238.90:<0.20528.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31007.0>,#Ref<16550.0.1.58645>}] [ns_server:debug,2014-08-19T16:49:52.704,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20544.0> (ok) [ns_server:debug,2014-08-19T16:49:52.705,ns_1@10.242.238.90:<0.20528.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:52.706,ns_1@10.242.238.90:<0.20545.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 733 [ns_server:info,2014-08-19T16:49:52.779,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 987 state to replica [ns_server:info,2014-08-19T16:49:52.782,ns_1@10.242.238.90:<0.20549.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 987 to state replica [ns_server:debug,2014-08-19T16:49:52.783,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 738. Nacking mccouch update. [views:debug,2014-08-19T16:49:52.783,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/738. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:52.783,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",738,pending,0} [ns_server:debug,2014-08-19T16:49:52.783,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,756,740,1019,1003,759,743,1022,1006,762,746, 1009,996,765,749,1012,999,752,1015,755,1018,1002,758,742,1021,1005,992,761, 745,1008,995,764,748,1011,998,767,751,1014,754,738,1017,1001,757,741,1020, 1004,760,744,1023,1007,994,763,747,1010] [ns_server:debug,2014-08-19T16:49:52.817,ns_1@10.242.238.90:<0.20549.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_987_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:52.818,ns_1@10.242.238.90:<0.20549.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[987]}, {checkpoints,[{987,0}]}, {name,<<"replication_building_987_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[987]}, {takeover,false}, {suffix,"building_987_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",987,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:52.819,ns_1@10.242.238.90:<0.20549.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20550.0> [rebalance:debug,2014-08-19T16:49:52.819,ns_1@10.242.238.90:<0.20549.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:52.820,ns_1@10.242.238.90:<0.20549.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31064.0>,#Ref<16550.0.1.58908>}]} [rebalance:info,2014-08-19T16:49:52.820,ns_1@10.242.238.90:<0.20549.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 987 [rebalance:debug,2014-08-19T16:49:52.820,ns_1@10.242.238.90:<0.20549.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31064.0>,#Ref<16550.0.1.58908>}] [ns_server:debug,2014-08-19T16:49:52.821,ns_1@10.242.238.90:<0.20549.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:52.841,ns_1@10.242.238.90:<0.20551.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 987 [ns_server:info,2014-08-19T16:49:52.847,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 732 state to replica [ns_server:info,2014-08-19T16:49:52.853,ns_1@10.242.238.90:<0.20554.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 732 to state replica [views:debug,2014-08-19T16:49:52.867,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/738. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:52.867,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",738,pending,0} [ns_server:debug,2014-08-19T16:49:52.907,ns_1@10.242.238.90:<0.20554.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_732_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:52.909,ns_1@10.242.238.90:<0.20554.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[732]}, {checkpoints,[{732,0}]}, {name,<<"replication_building_732_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[732]}, {takeover,false}, {suffix,"building_732_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",732,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:52.909,ns_1@10.242.238.90:<0.20554.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20555.0> [rebalance:debug,2014-08-19T16:49:52.909,ns_1@10.242.238.90:<0.20554.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:52.910,ns_1@10.242.238.90:<0.20554.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31098.0>,#Ref<16550.0.1.59074>}]} [rebalance:info,2014-08-19T16:49:52.910,ns_1@10.242.238.90:<0.20554.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 732 [rebalance:debug,2014-08-19T16:49:52.910,ns_1@10.242.238.90:<0.20554.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31098.0>,#Ref<16550.0.1.59074>}] [ns_server:debug,2014-08-19T16:49:52.911,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20556.0> (ok) [ns_server:debug,2014-08-19T16:49:52.912,ns_1@10.242.238.90:<0.20554.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:52.912,ns_1@10.242.238.90:<0.20557.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 732 [ns_server:info,2014-08-19T16:49:52.993,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 986 state to replica [ns_server:info,2014-08-19T16:49:52.997,ns_1@10.242.238.90:<0.20574.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 986 to state replica [ns_server:debug,2014-08-19T16:49:53.009,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 993. Nacking mccouch update. [views:debug,2014-08-19T16:49:53.009,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/993. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:53.009,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",993,replica,0} [ns_server:debug,2014-08-19T16:49:53.009,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,756,740,1019,1003,759,743,1022,1006,993,762, 746,1009,996,765,749,1012,999,752,1015,755,1018,1002,758,742,1021,1005,992, 761,745,1008,995,764,748,1011,998,767,751,1014,754,738,1017,1001,757,741, 1020,1004,760,744,1023,1007,994,763,747,1010] [ns_server:debug,2014-08-19T16:49:53.032,ns_1@10.242.238.90:<0.20574.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_986_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:53.034,ns_1@10.242.238.90:<0.20574.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[986]}, {checkpoints,[{986,0}]}, {name,<<"replication_building_986_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[986]}, {takeover,false}, {suffix,"building_986_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",986,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:53.035,ns_1@10.242.238.90:<0.20574.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20575.0> [rebalance:debug,2014-08-19T16:49:53.035,ns_1@10.242.238.90:<0.20574.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:53.035,ns_1@10.242.238.90:<0.20574.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31199.0>,#Ref<16550.0.1.59936>}]} [rebalance:info,2014-08-19T16:49:53.036,ns_1@10.242.238.90:<0.20574.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 986 [rebalance:debug,2014-08-19T16:49:53.036,ns_1@10.242.238.90:<0.20574.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31199.0>,#Ref<16550.0.1.59936>}] [ns_server:debug,2014-08-19T16:49:53.037,ns_1@10.242.238.90:<0.20574.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:53.055,ns_1@10.242.238.90:<0.20576.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 986 [ns_server:info,2014-08-19T16:49:53.062,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 731 state to replica [ns_server:info,2014-08-19T16:49:53.069,ns_1@10.242.238.90:<0.20579.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 731 to state replica [views:debug,2014-08-19T16:49:53.092,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/993. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:53.093,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",993,replica,0} [ns_server:debug,2014-08-19T16:49:53.121,ns_1@10.242.238.90:<0.20579.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_731_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:53.123,ns_1@10.242.238.90:<0.20579.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[731]}, {checkpoints,[{731,0}]}, {name,<<"replication_building_731_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[731]}, {takeover,false}, {suffix,"building_731_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",731,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:53.123,ns_1@10.242.238.90:<0.20579.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20580.0> [rebalance:debug,2014-08-19T16:49:53.123,ns_1@10.242.238.90:<0.20579.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:53.124,ns_1@10.242.238.90:<0.20579.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31219.0>,#Ref<16550.0.1.60050>}]} [rebalance:info,2014-08-19T16:49:53.124,ns_1@10.242.238.90:<0.20579.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 731 [rebalance:debug,2014-08-19T16:49:53.124,ns_1@10.242.238.90:<0.20579.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31219.0>,#Ref<16550.0.1.60050>}] [ns_server:debug,2014-08-19T16:49:53.125,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20581.0> (ok) [ns_server:debug,2014-08-19T16:49:53.125,ns_1@10.242.238.90:<0.20579.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:53.127,ns_1@10.242.238.90:<0.20582.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 731 [ns_server:info,2014-08-19T16:49:53.203,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 985 state to replica [ns_server:info,2014-08-19T16:49:53.208,ns_1@10.242.238.90:<0.20599.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 985 to state replica [ns_server:debug,2014-08-19T16:49:53.242,ns_1@10.242.238.90:<0.20599.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_985_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:53.244,ns_1@10.242.238.90:<0.20599.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[985]}, {checkpoints,[{985,0}]}, {name,<<"replication_building_985_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[985]}, {takeover,false}, {suffix,"building_985_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",985,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:53.244,ns_1@10.242.238.90:<0.20599.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20600.0> [rebalance:debug,2014-08-19T16:49:53.244,ns_1@10.242.238.90:<0.20599.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:49:53.245,ns_1@10.242.238.90:<0.20599.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31281.0>,#Ref<16550.0.1.60333>}]} [rebalance:info,2014-08-19T16:49:53.245,ns_1@10.242.238.90:<0.20599.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 985 [rebalance:debug,2014-08-19T16:49:53.245,ns_1@10.242.238.90:<0.20599.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31281.0>,#Ref<16550.0.1.60333>}] [ns_server:debug,2014-08-19T16:49:53.246,ns_1@10.242.238.90:<0.20599.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:49:53.259,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 991. Nacking mccouch update. [views:debug,2014-08-19T16:49:53.259,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/991. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:53.260,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",991,replica,0} [ns_server:debug,2014-08-19T16:49:53.260,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,756,740,1019,1003,759,743,1022,1006,993,762, 746,1009,996,765,749,1012,999,752,1015,755,1018,1002,758,742,1021,1005,992, 761,745,1008,995,764,748,1011,998,767,751,1014,754,738,1017,1001,757,741, 1020,1004,991,760,744,1023,1007,994,763,747,1010] [rebalance:debug,2014-08-19T16:49:53.266,ns_1@10.242.238.90:<0.20601.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 985 [ns_server:info,2014-08-19T16:49:53.272,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 730 state to replica [ns_server:info,2014-08-19T16:49:53.278,ns_1@10.242.238.90:<0.20604.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 730 to state replica [ns_server:debug,2014-08-19T16:49:53.332,ns_1@10.242.238.90:<0.20604.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_730_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:53.334,ns_1@10.242.238.90:<0.20604.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[730]}, {checkpoints,[{730,0}]}, {name,<<"replication_building_730_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[730]}, {takeover,false}, {suffix,"building_730_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",730,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:53.334,ns_1@10.242.238.90:<0.20604.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20605.0> [rebalance:debug,2014-08-19T16:49:53.335,ns_1@10.242.238.90:<0.20604.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:53.335,ns_1@10.242.238.90:<0.20604.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31301.0>,#Ref<16550.0.1.60448>}]} [rebalance:info,2014-08-19T16:49:53.335,ns_1@10.242.238.90:<0.20604.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 730 
[rebalance:debug,2014-08-19T16:49:53.336,ns_1@10.242.238.90:<0.20604.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31301.0>,#Ref<16550.0.1.60448>}] [ns_server:debug,2014-08-19T16:49:53.336,ns_1@10.242.238.90:<0.20604.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:49:53.337,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20608.0> (ok) [rebalance:debug,2014-08-19T16:49:53.338,ns_1@10.242.238.90:<0.20610.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 730 [views:debug,2014-08-19T16:49:53.342,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/991. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:53.342,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",991,replica,0} [ns_server:info,2014-08-19T16:49:53.414,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 984 state to replica [ns_server:info,2014-08-19T16:49:53.418,ns_1@10.242.238.90:<0.20638.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 984 to state replica [ns_server:debug,2014-08-19T16:49:53.435,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 989. Nacking mccouch update. [views:debug,2014-08-19T16:49:53.435,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/989. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:53.435,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",989,replica,0} [ns_server:debug,2014-08-19T16:49:53.435,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,756,740,1019,1003,759,743,1022,1006,993,762, 746,1009,996,765,749,1012,999,752,1015,755,1018,1002,989,758,742,1021,1005, 992,761,745,1008,995,764,748,1011,998,767,751,1014,754,738,1017,1001,757,741, 1020,1004,991,760,744,1023,1007,994,763,747,1010] [ns_server:debug,2014-08-19T16:49:53.453,ns_1@10.242.238.90:<0.20638.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_984_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:53.455,ns_1@10.242.238.90:<0.20638.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[984]}, {checkpoints,[{984,0}]}, {name,<<"replication_building_984_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[984]}, {takeover,false}, {suffix,"building_984_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",984,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:53.455,ns_1@10.242.238.90:<0.20638.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20641.0> [rebalance:debug,2014-08-19T16:49:53.456,ns_1@10.242.238.90:<0.20638.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:53.456,ns_1@10.242.238.90:<0.20638.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31359.0>,#Ref<16550.0.1.60773>}]} 
[rebalance:info,2014-08-19T16:49:53.456,ns_1@10.242.238.90:<0.20638.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 984 [rebalance:debug,2014-08-19T16:49:53.456,ns_1@10.242.238.90:<0.20638.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31359.0>,#Ref<16550.0.1.60773>}] [ns_server:debug,2014-08-19T16:49:53.458,ns_1@10.242.238.90:<0.20638.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:49:53.468,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/989. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:53.469,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",989,replica,0} [rebalance:debug,2014-08-19T16:49:53.481,ns_1@10.242.238.90:<0.20642.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 984 [ns_server:info,2014-08-19T16:49:53.487,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 729 state to replica [ns_server:info,2014-08-19T16:49:53.492,ns_1@10.242.238.90:<0.20645.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 729 to state replica [ns_server:debug,2014-08-19T16:49:53.543,ns_1@10.242.238.90:<0.20645.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_729_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:53.544,ns_1@10.242.238.90:<0.20645.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[729]}, {checkpoints,[{729,0}]}, {name,<<"replication_building_729_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[729]}, {takeover,false}, {suffix,"building_729_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",729,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:53.544,ns_1@10.242.238.90:<0.20645.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20660.0> [rebalance:debug,2014-08-19T16:49:53.545,ns_1@10.242.238.90:<0.20645.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:53.545,ns_1@10.242.238.90:<0.20645.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31379.0>,#Ref<16550.0.1.60894>}]} [rebalance:info,2014-08-19T16:49:53.545,ns_1@10.242.238.90:<0.20645.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 729 [rebalance:debug,2014-08-19T16:49:53.546,ns_1@10.242.238.90:<0.20645.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31379.0>,#Ref<16550.0.1.60894>}] [ns_server:debug,2014-08-19T16:49:53.546,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20661.0> (ok) [ns_server:debug,2014-08-19T16:49:53.547,ns_1@10.242.238.90:<0.20645.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:53.548,ns_1@10.242.238.90:<0.20662.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 729 [ns_server:debug,2014-08-19T16:49:53.577,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 987. Nacking mccouch update. 
[views:debug,2014-08-19T16:49:53.577,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/987. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:53.578,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",987,replica,0} [ns_server:debug,2014-08-19T16:49:53.578,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,987,756,740,1019,1003,759,743,1022,1006,993, 762,746,1009,996,765,749,1012,999,752,1015,755,1018,1002,989,758,742,1021, 1005,992,761,745,1008,995,764,748,1011,998,767,751,1014,754,738,1017,1001, 757,741,1020,1004,991,760,744,1023,1007,994,763,747,1010] [ns_server:info,2014-08-19T16:49:53.622,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 983 state to replica [ns_server:info,2014-08-19T16:49:53.627,ns_1@10.242.238.90:<0.20665.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 983 to state replica [views:debug,2014-08-19T16:49:53.636,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/987. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:53.637,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",987,replica,0} [ns_server:debug,2014-08-19T16:49:53.661,ns_1@10.242.238.90:<0.20665.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_983_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:53.662,ns_1@10.242.238.90:<0.20665.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[983]}, {checkpoints,[{983,0}]}, {name,<<"replication_building_983_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[983]}, {takeover,false}, {suffix,"building_983_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",983,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:53.663,ns_1@10.242.238.90:<0.20665.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20666.0> [rebalance:debug,2014-08-19T16:49:53.663,ns_1@10.242.238.90:<0.20665.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:53.664,ns_1@10.242.238.90:<0.20665.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31436.0>,#Ref<16550.0.1.61141>}]} [rebalance:info,2014-08-19T16:49:53.664,ns_1@10.242.238.90:<0.20665.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 983 [rebalance:debug,2014-08-19T16:49:53.664,ns_1@10.242.238.90:<0.20665.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31436.0>,#Ref<16550.0.1.61141>}] [ns_server:debug,2014-08-19T16:49:53.665,ns_1@10.242.238.90:<0.20665.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:53.685,ns_1@10.242.238.90:<0.20667.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 983 [ns_server:info,2014-08-19T16:49:53.690,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 728 state to replica 
[ns_server:info,2014-08-19T16:49:53.697,ns_1@10.242.238.90:<0.20684.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 728 to state replica [ns_server:debug,2014-08-19T16:49:53.720,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 739. Nacking mccouch update. [views:debug,2014-08-19T16:49:53.720,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/739. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:53.720,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",739,pending,0} [ns_server:debug,2014-08-19T16:49:53.720,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,1016,1000,987,756,740,1019,1003,759,743,1022,1006,993, 762,746,1009,996,765,749,1012,999,752,1015,755,739,1018,1002,989,758,742, 1021,1005,992,761,745,1008,995,764,748,1011,998,767,751,1014,754,738,1017, 1001,757,741,1020,1004,991,760,744,1023,1007,994,763,747,1010] [ns_server:debug,2014-08-19T16:49:53.746,ns_1@10.242.238.90:<0.20684.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_728_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:53.748,ns_1@10.242.238.90:<0.20684.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[728]}, {checkpoints,[{728,0}]}, {name,<<"replication_building_728_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[728]}, {takeover,false}, {suffix,"building_728_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",728,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:53.749,ns_1@10.242.238.90:<0.20684.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20685.0> [rebalance:debug,2014-08-19T16:49:53.749,ns_1@10.242.238.90:<0.20684.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:53.749,ns_1@10.242.238.90:<0.20684.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31456.0>,#Ref<16550.0.1.61280>}]} [rebalance:info,2014-08-19T16:49:53.749,ns_1@10.242.238.90:<0.20684.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 728 [rebalance:debug,2014-08-19T16:49:53.750,ns_1@10.242.238.90:<0.20684.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31456.0>,#Ref<16550.0.1.61280>}] [ns_server:debug,2014-08-19T16:49:53.750,ns_1@10.242.238.90:<0.20684.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:49:53.751,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20686.0> (ok) [rebalance:debug,2014-08-19T16:49:53.752,ns_1@10.242.238.90:<0.20687.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 728 [views:debug,2014-08-19T16:49:53.754,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/739. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:53.754,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",739,pending,0} [ns_server:info,2014-08-19T16:49:53.826,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 982 state to replica [ns_server:info,2014-08-19T16:49:53.830,ns_1@10.242.238.90:<0.20704.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 982 to state replica [ns_server:debug,2014-08-19T16:49:53.837,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 737. Nacking mccouch update. [views:debug,2014-08-19T16:49:53.837,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/737. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:53.838,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",737,pending,0} [ns_server:debug,2014-08-19T16:49:53.838,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,737,1016,1000,987,756,740,1019,1003,759,743,1022,1006, 993,762,746,1009,996,765,749,1012,999,752,1015,755,739,1018,1002,989,758,742, 1021,1005,992,761,745,1008,995,764,748,1011,998,767,751,1014,754,738,1017, 1001,757,741,1020,1004,991,760,744,1023,1007,994,763,747,1010] [ns_server:debug,2014-08-19T16:49:53.864,ns_1@10.242.238.90:<0.20704.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_982_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:53.866,ns_1@10.242.238.90:<0.20704.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[982]}, {checkpoints,[{982,0}]}, {name,<<"replication_building_982_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[982]}, {takeover,false}, {suffix,"building_982_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",982,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:53.866,ns_1@10.242.238.90:<0.20704.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20705.0> [rebalance:debug,2014-08-19T16:49:53.866,ns_1@10.242.238.90:<0.20704.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:53.867,ns_1@10.242.238.90:<0.20704.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31513.0>,#Ref<16550.0.1.61543>}]} [rebalance:info,2014-08-19T16:49:53.867,ns_1@10.242.238.90:<0.20704.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 982 [rebalance:debug,2014-08-19T16:49:53.868,ns_1@10.242.238.90:<0.20704.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31513.0>,#Ref<16550.0.1.61543>}] [ns_server:debug,2014-08-19T16:49:53.869,ns_1@10.242.238.90:<0.20704.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:49:53.871,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/737. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:53.871,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",737,pending,0} [rebalance:debug,2014-08-19T16:49:53.887,ns_1@10.242.238.90:<0.20706.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 982 [ns_server:info,2014-08-19T16:49:53.893,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 727 state to replica [ns_server:info,2014-08-19T16:49:53.900,ns_1@10.242.238.90:<0.20709.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 727 to state replica [ns_server:debug,2014-08-19T16:49:53.949,ns_1@10.242.238.90:<0.20709.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_727_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:53.951,ns_1@10.242.238.90:<0.20709.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[727]}, {checkpoints,[{727,0}]}, {name,<<"replication_building_727_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[727]}, {takeover,false}, {suffix,"building_727_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",727,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:53.952,ns_1@10.242.238.90:<0.20709.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20710.0> [rebalance:debug,2014-08-19T16:49:53.952,ns_1@10.242.238.90:<0.20709.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:53.953,ns_1@10.242.238.90:<0.20709.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31533.0>,#Ref<16550.0.1.61661>}]} [rebalance:info,2014-08-19T16:49:53.953,ns_1@10.242.238.90:<0.20709.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 727 [rebalance:debug,2014-08-19T16:49:53.953,ns_1@10.242.238.90:<0.20709.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31533.0>,#Ref<16550.0.1.61661>}] [ns_server:debug,2014-08-19T16:49:53.954,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20717.0> (ok) [ns_server:debug,2014-08-19T16:49:53.954,ns_1@10.242.238.90:<0.20709.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:53.955,ns_1@10.242.238.90:<0.20725.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 727 [ns_server:debug,2014-08-19T16:49:54.028,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 735. Nacking mccouch update. [views:debug,2014-08-19T16:49:54.028,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/735. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:54.028,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",735,pending,0} [ns_server:debug,2014-08-19T16:49:54.028,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,737,1016,1000,987,756,740,1019,1003,759,743,1022,1006, 993,762,746,1009,996,765,749,1012,999,752,1015,755,739,1018,1002,989,758,742, 1021,1005,992,761,745,1008,995,764,748,1011,998,767,751,735,1014,754,738, 1017,1001,757,741,1020,1004,991,760,744,1023,1007,994,763,747,1010] [ns_server:info,2014-08-19T16:49:54.035,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 981 state to replica [ns_server:info,2014-08-19T16:49:54.041,ns_1@10.242.238.90:<0.20729.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 981 to state replica [ns_server:debug,2014-08-19T16:49:54.074,ns_1@10.242.238.90:<0.20729.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_981_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:54.076,ns_1@10.242.238.90:<0.20729.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[981]}, {checkpoints,[{981,0}]}, {name,<<"replication_building_981_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[981]}, {takeover,false}, {suffix,"building_981_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",981,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:54.077,ns_1@10.242.238.90:<0.20729.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20730.0> [rebalance:debug,2014-08-19T16:49:54.077,ns_1@10.242.238.90:<0.20729.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:54.078,ns_1@10.242.238.90:<0.20729.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31590.0>,#Ref<16550.0.1.61948>}]} [rebalance:info,2014-08-19T16:49:54.078,ns_1@10.242.238.90:<0.20729.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 981 [rebalance:debug,2014-08-19T16:49:54.078,ns_1@10.242.238.90:<0.20729.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31590.0>,#Ref<16550.0.1.61948>}] [views:debug,2014-08-19T16:49:54.078,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/735. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:54.079,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",735,pending,0} [ns_server:debug,2014-08-19T16:49:54.080,ns_1@10.242.238.90:<0.20729.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:54.098,ns_1@10.242.238.90:<0.20731.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 981 [ns_server:info,2014-08-19T16:49:54.104,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 726 state to replica [ns_server:info,2014-08-19T16:49:54.109,ns_1@10.242.238.90:<0.20734.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 726 to state replica [ns_server:debug,2014-08-19T16:49:54.159,ns_1@10.242.238.90:<0.20734.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_726_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:54.160,ns_1@10.242.238.90:<0.20734.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[726]}, {checkpoints,[{726,0}]}, {name,<<"replication_building_726_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[726]}, {takeover,false}, {suffix,"building_726_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",726,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:49:54.161,ns_1@10.242.238.90:<0.20734.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20749.0> [rebalance:debug,2014-08-19T16:49:54.161,ns_1@10.242.238.90:<0.20734.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:54.162,ns_1@10.242.238.90:<0.20734.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31624.0>,#Ref<16550.0.1.62108>}]} [rebalance:info,2014-08-19T16:49:54.162,ns_1@10.242.238.90:<0.20734.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 726 [rebalance:debug,2014-08-19T16:49:54.162,ns_1@10.242.238.90:<0.20734.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31624.0>,#Ref<16550.0.1.62108>}] [ns_server:debug,2014-08-19T16:49:54.163,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20750.0> (ok) [ns_server:debug,2014-08-19T16:49:54.163,ns_1@10.242.238.90:<0.20734.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:54.164,ns_1@10.242.238.90:<0.20751.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 726 [ns_server:debug,2014-08-19T16:49:54.212,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 733. Nacking mccouch update. [views:debug,2014-08-19T16:49:54.212,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/733. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:54.212,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",733,pending,0} [ns_server:debug,2014-08-19T16:49:54.212,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,737,1016,1000,987,756,740,1019,1003,759,743,1022,1006, 993,762,746,1009,996,765,749,733,1012,999,752,1015,755,739,1018,1002,989,758, 742,1021,1005,992,761,745,1008,995,764,748,1011,998,767,751,735,1014,754,738, 1017,1001,757,741,1020,1004,991,760,744,1023,1007,994,763,747,1010] [ns_server:info,2014-08-19T16:49:54.237,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 980 state to replica [ns_server:info,2014-08-19T16:49:54.242,ns_1@10.242.238.90:<0.20754.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 980 to state replica [ns_server:debug,2014-08-19T16:49:54.276,ns_1@10.242.238.90:<0.20754.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_980_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:54.278,ns_1@10.242.238.90:<0.20754.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[980]}, {checkpoints,[{980,0}]}, {name,<<"replication_building_980_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[980]}, {takeover,false}, {suffix,"building_980_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",980,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:49:54.278,ns_1@10.242.238.90:<0.20754.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.20755.0> [rebalance:debug,2014-08-19T16:49:54.278,ns_1@10.242.238.90:<0.20754.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:49:54.279,ns_1@10.242.238.90:<0.20754.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31680.0>,#Ref<16550.0.1.62377>}]} [rebalance:info,2014-08-19T16:49:54.279,ns_1@10.242.238.90:<0.20754.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 980 [views:debug,2014-08-19T16:49:54.279,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/733. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:54.279,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",733,pending,0} [rebalance:debug,2014-08-19T16:49:54.280,ns_1@10.242.238.90:<0.20754.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31680.0>,#Ref<16550.0.1.62377>}] [ns_server:debug,2014-08-19T16:49:54.281,ns_1@10.242.238.90:<0.20754.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:49:54.301,ns_1@10.242.238.90:<0.20756.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 980 [ns_server:debug,2014-08-19T16:49:54.404,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 990. Nacking mccouch update. [views:debug,2014-08-19T16:49:54.405,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/990. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:54.405,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",990,replica,0} [ns_server:debug,2014-08-19T16:49:54.405,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,737,1016,1000,987,756,740,1019,1003,990,759,743,1022, 1006,993,762,746,1009,996,765,749,733,1012,999,752,1015,755,739,1018,1002, 989,758,742,1021,1005,992,761,745,1008,995,764,748,1011,998,767,751,735,1014, 754,738,1017,1001,757,741,1020,1004,991,760,744,1023,1007,994,763,747,1010] [views:debug,2014-08-19T16:49:54.455,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/990. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:54.455,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",990,replica,0} [ns_server:debug,2014-08-19T16:49:54.530,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 988. Nacking mccouch update. [views:debug,2014-08-19T16:49:54.530,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/988. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:54.531,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",988,replica,0} [ns_server:debug,2014-08-19T16:49:54.531,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,737,1016,1000,987,756,740,1019,1003,990,759,743,1022, 1006,993,762,746,1009,996,765,749,733,1012,999,752,1015,755,739,1018,1002, 989,758,742,1021,1005,992,761,745,1008,995,764,748,1011,998,767,751,735,1014, 754,738,1017,1001,988,757,741,1020,1004,991,760,744,1023,1007,994,763,747, 1010] [views:debug,2014-08-19T16:49:54.564,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/988. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:54.565,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",988,replica,0} [ns_server:debug,2014-08-19T16:49:54.696,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 986. Nacking mccouch update. [views:debug,2014-08-19T16:49:54.696,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/986. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:54.697,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",986,replica,0} [ns_server:debug,2014-08-19T16:49:54.697,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,753,737,1016,1000,987,756,740,1019,1003,990,759,743,1022, 1006,993,762,746,1009,996,765,749,733,1012,999,752,1015,986,755,739,1018, 1002,989,758,742,1021,1005,992,761,745,1008,995,764,748,1011,998,767,751,735, 1014,754,738,1017,1001,988,757,741,1020,1004,991,760,744,1023,1007,994,763, 747,1010] [views:debug,2014-08-19T16:49:54.763,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/986. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:54.764,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",986,replica,0} [ns_server:debug,2014-08-19T16:49:54.914,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 984. Nacking mccouch update. [views:debug,2014-08-19T16:49:54.914,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/984. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:54.914,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",984,replica,0} [ns_server:debug,2014-08-19T16:49:54.914,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743, 1022,1006,993,762,746,1009,996,765,749,733,1012,999,752,1015,986,755,739, 1018,1002,989,758,742,1021,1005,992,761,745,1008,995,764,748,1011,998,767, 751,735,1014,754,738,1017,1001,988,757,741,1020,1004,991,760,744,1023,1007, 994,763,747,1010] [views:debug,2014-08-19T16:49:54.964,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/984. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:54.964,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",984,replica,0} [ns_server:debug,2014-08-19T16:49:55.115,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 982. Nacking mccouch update. [views:debug,2014-08-19T16:49:55.115,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/982. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:55.115,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",982,replica,0} [ns_server:debug,2014-08-19T16:49:55.115,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743, 1022,1006,993,762,746,1009,996,765,749,733,1012,999,752,1015,986,755,739, 1018,1002,989,758,742,1021,1005,992,761,745,1008,995,764,748,1011,998,982, 767,751,735,1014,754,738,1017,1001,988,757,741,1020,1004,991,760,744,1023, 1007,994,763,747,1010] [views:debug,2014-08-19T16:49:55.166,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/982. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:55.166,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",982,replica,0} [ns_server:debug,2014-08-19T16:49:55.307,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 980. Nacking mccouch update. [views:debug,2014-08-19T16:49:55.307,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/980. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:55.307,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",980,replica,0} [ns_server:debug,2014-08-19T16:49:55.307,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743, 1022,1006,993,762,746,1009,996,980,765,749,733,1012,999,752,1015,986,755,739, 1018,1002,989,758,742,1021,1005,992,761,745,1008,995,764,748,1011,998,982, 767,751,735,1014,754,738,1017,1001,988,757,741,1020,1004,991,760,744,1023, 1007,994,763,747,1010] [views:debug,2014-08-19T16:49:55.375,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/980. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:55.375,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",980,replica,0} [ns_server:debug,2014-08-19T16:49:55.539,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 736. Nacking mccouch update. [views:debug,2014-08-19T16:49:55.539,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/736. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:55.539,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",736,pending,0} [ns_server:debug,2014-08-19T16:49:55.540,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,1013,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743, 1022,1006,993,762,746,1009,996,980,765,749,733,1012,999,752,736,1015,986,755, 739,1018,1002,989,758,742,1021,1005,992,761,745,1008,995,764,748,1011,998, 982,767,751,735,1014,754,738,1017,1001,988,757,741,1020,1004,991,760,744, 1023,1007,994,763,747,1010] [views:debug,2014-08-19T16:49:55.573,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/736. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:55.573,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",736,pending,0} [ns_server:debug,2014-08-19T16:49:55.674,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 734. Nacking mccouch update. [views:debug,2014-08-19T16:49:55.674,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/734. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:55.674,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",734,pending,0} [ns_server:debug,2014-08-19T16:49:55.674,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,734,1013,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743, 1022,1006,993,762,746,1009,996,980,765,749,733,1012,999,752,736,1015,986,755, 739,1018,1002,989,758,742,1021,1005,992,761,745,1008,995,764,748,1011,998, 982,767,751,735,1014,754,738,1017,1001,988,757,741,1020,1004,991,760,744, 1023,1007,994,763,747,1010] [views:debug,2014-08-19T16:49:55.733,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/734. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:55.733,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",734,pending,0} [ns_server:debug,2014-08-19T16:49:55.816,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 732. Nacking mccouch update. [views:debug,2014-08-19T16:49:55.816,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/732. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:55.816,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",732,pending,0} [ns_server:debug,2014-08-19T16:49:55.816,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,734,1013,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743, 1022,1006,993,762,746,1009,996,980,765,749,733,1012,999,752,736,1015,986,755, 739,1018,1002,989,758,742,1021,1005,992,761,745,1008,995,764,748,732,1011, 998,982,767,751,735,1014,754,738,1017,1001,988,757,741,1020,1004,991,760,744, 1023,1007,994,763,747,1010] [views:debug,2014-08-19T16:49:55.876,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/732. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:55.876,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",732,pending,0} [ns_server:debug,2014-08-19T16:49:56.042,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 730. Nacking mccouch update. [views:debug,2014-08-19T16:49:56.042,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/730. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:56.042,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",730,pending,0} [ns_server:debug,2014-08-19T16:49:56.042,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,734,1013,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743, 1022,1006,993,762,746,730,1009,996,980,765,749,733,1012,999,752,736,1015,986, 755,739,1018,1002,989,758,742,1021,1005,992,761,745,1008,995,764,748,732, 1011,998,982,767,751,735,1014,754,738,1017,1001,988,757,741,1020,1004,991, 760,744,1023,1007,994,763,747,1010] [views:debug,2014-08-19T16:49:56.100,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/730. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:56.101,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",730,pending,0} [ns_server:debug,2014-08-19T16:49:56.176,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 728. Nacking mccouch update. [views:debug,2014-08-19T16:49:56.176,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/728. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:56.176,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",728,pending,0} [ns_server:debug,2014-08-19T16:49:56.176,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,734,1013,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743, 1022,1006,993,762,746,730,1009,996,980,765,749,733,1012,999,752,736,1015,986, 755,739,1018,1002,989,758,742,1021,1005,992,761,745,1008,995,764,748,732, 1011,998,982,767,751,735,1014,754,738,1017,1001,988,757,741,1020,1004,991, 760,744,728,1023,1007,994,763,747,1010] [views:debug,2014-08-19T16:49:56.210,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/728. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:56.210,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",728,pending,0} [ns_server:debug,2014-08-19T16:49:56.367,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 726. Nacking mccouch update. [views:debug,2014-08-19T16:49:56.367,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/726. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:56.367,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",726,pending,0} [ns_server:debug,2014-08-19T16:49:56.367,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,766,750,734,1013,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743, 1022,1006,993,762,746,730,1009,996,980,765,749,733,1012,999,752,736,1015,986, 755,739,1018,1002,989,758,742,726,1021,1005,992,761,745,1008,995,764,748,732, 1011,998,982,767,751,735,1014,754,738,1017,1001,988,757,741,1020,1004,991, 760,744,728,1023,1007,994,763,747,1010] [views:debug,2014-08-19T16:49:56.451,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/726. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:56.451,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",726,pending,0} [rebalance:debug,2014-08-19T16:49:56.452,ns_1@10.242.238.90:<0.20165.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:56.452,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20165.0> (ok) [ns_server:debug,2014-08-19T16:49:56.636,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 985. Nacking mccouch update. [views:debug,2014-08-19T16:49:56.636,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/985. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:56.636,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",985,replica,0} [ns_server:debug,2014-08-19T16:49:56.636,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743,1022,1006, 993,762,746,730,1009,996,980,765,749,733,1012,999,752,736,1015,986,755,739, 1018,1002,989,758,742,726,1021,1005,992,761,745,1008,995,764,748,732,1011, 998,982,767,751,735,1014,985,754,738,1017,1001,988,757,741,1020,1004,991,760, 744,728,1023,1007,994,763,747,1010,766,734,1013] [views:debug,2014-08-19T16:49:56.670,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/985. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:56.671,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",985,replica,0} [ns_server:debug,2014-08-19T16:49:56.753,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 983. Nacking mccouch update. [views:debug,2014-08-19T16:49:56.754,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/983. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:56.754,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",983,replica,0} [ns_server:debug,2014-08-19T16:49:56.754,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743,1022,1006, 993,762,746,730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755, 739,1018,1002,989,758,742,726,1021,1005,992,761,745,1008,995,764,748,732, 1011,998,982,767,751,735,1014,985,754,738,1017,1001,988,757,741,1020,1004, 991,760,744,728,1023,1007,994,763,747,1010,766,734,1013] [views:debug,2014-08-19T16:49:56.804,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/983. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:56.804,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",983,replica,0} [ns_server:debug,2014-08-19T16:49:56.888,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 981. Nacking mccouch update. [views:debug,2014-08-19T16:49:56.888,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/981. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:56.888,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",981,replica,0} [ns_server:debug,2014-08-19T16:49:56.888,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743,1022,1006, 993,762,746,730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755, 739,1018,1002,989,758,742,726,1021,1005,992,761,745,1008,995,764,748,732, 1011,998,982,767,751,735,1014,985,754,738,1017,1001,988,757,741,1020,1004, 991,760,744,728,1023,1007,994,763,747,1010,981,766,734,1013] [views:debug,2014-08-19T16:49:56.938,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/981. Updated state: replica (0) [ns_server:debug,2014-08-19T16:49:56.938,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",981,replica,0} [ns_server:debug,2014-08-19T16:49:57.093,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 731. Nacking mccouch update. [views:debug,2014-08-19T16:49:57.093,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/731. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:57.093,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",731,pending,0} [ns_server:debug,2014-08-19T16:49:57.094,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743,1022,1006, 993,762,746,730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755, 739,1018,1002,989,758,742,726,1021,1005,992,761,745,1008,995,764,748,732, 1011,998,982,767,751,735,1014,985,754,738,1017,1001,988,757,741,1020,1004, 991,760,744,728,1023,1007,994,763,747,731,1010,981,766,734,1013] [views:debug,2014-08-19T16:49:57.127,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/731. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:57.127,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",731,pending,0} [ns_server:debug,2014-08-19T16:49:57.236,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 729. Nacking mccouch update. [views:debug,2014-08-19T16:49:57.236,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/729. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:57.236,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",729,pending,0} [ns_server:debug,2014-08-19T16:49:57.236,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,753,737,1016,1000,987,756,740,1019,1003,990,759,743,1022,1006, 993,762,746,730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755, 739,1018,1002,989,758,742,726,1021,1005,992,761,745,729,1008,995,764,748,732, 1011,998,982,767,751,735,1014,985,754,738,1017,1001,988,757,741,1020,1004, 991,760,744,728,1023,1007,994,763,747,731,1010,981,766,734,1013] [views:debug,2014-08-19T16:49:57.296,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/729. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:57.296,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",729,pending,0} [ns_server:debug,2014-08-19T16:49:57.388,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 727. Nacking mccouch update. [views:debug,2014-08-19T16:49:57.388,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/727. Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:57.388,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",727,pending,0} [ns_server:debug,2014-08-19T16:49:57.388,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,1016,987,756,740,1019,1003,990,759,743,727,1022,1006,993,762, 746,730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755,739,1018, 1002,989,758,742,726,1021,1005,992,761,745,729,1008,995,764,748,732,1011,998, 982,767,751,735,1014,985,754,738,1017,1001,988,757,741,1020,1004,991,760,744, 728,1023,1007,994,763,747,731,1010,981,766,734,1013,753,1000] [views:debug,2014-08-19T16:49:57.438,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/727. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:49:57.438,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",727,pending,0} [rebalance:debug,2014-08-19T16:49:57.441,ns_1@10.242.238.90:<0.20751.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:57.442,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20751.0> (ok) [rebalance:debug,2014-08-19T16:49:57.472,ns_1@10.242.238.90:<0.20687.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:57.472,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20687.0> (ok) [rebalance:debug,2014-08-19T16:49:57.506,ns_1@10.242.238.90:<0.20610.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:57.506,ns_1@10.242.238.90:<0.20725.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:57.506,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20610.0> (ok) [ns_server:debug,2014-08-19T16:49:57.506,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20725.0> (ok) [rebalance:debug,2014-08-19T16:49:57.590,ns_1@10.242.238.90:<0.20662.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:57.590,ns_1@10.242.238.90:<0.20557.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:57.590,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20662.0> (ok) [ns_server:debug,2014-08-19T16:49:57.590,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20557.0> (ok) [rebalance:debug,2014-08-19T16:49:57.697,ns_1@10.242.238.90:<0.20582.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:57.697,ns_1@10.242.238.90:<0.20520.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:57.697,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20582.0> (ok) [ns_server:debug,2014-08-19T16:49:57.697,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20520.0> (ok) [rebalance:debug,2014-08-19T16:49:57.831,ns_1@10.242.238.90:<0.20456.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:57.831,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20456.0> (ok) [rebalance:debug,2014-08-19T16:49:57.831,ns_1@10.242.238.90:<0.20545.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:57.831,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20545.0> (ok) [rebalance:debug,2014-08-19T16:49:57.931,ns_1@10.242.238.90:<0.20392.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:57.931,ns_1@10.242.238.90:<0.20495.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:57.932,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20392.0> (ok) [ns_server:debug,2014-08-19T16:49:57.932,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from 
subprocess: <0.20495.0> (ok) [rebalance:debug,2014-08-19T16:49:58.032,ns_1@10.242.238.90:<0.20335.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:58.032,ns_1@10.242.238.90:<0.20417.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:58.032,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20335.0> (ok) [ns_server:debug,2014-08-19T16:49:58.032,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20417.0> (ok) [rebalance:debug,2014-08-19T16:49:58.132,ns_1@10.242.238.90:<0.20285.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:58.133,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20285.0> (ok) [rebalance:debug,2014-08-19T16:49:58.133,ns_1@10.242.238.90:<0.20367.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:58.133,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20367.0> (ok) [rebalance:debug,2014-08-19T16:49:58.233,ns_1@10.242.238.90:<0.20221.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:58.233,ns_1@10.242.238.90:<0.20310.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:58.233,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20221.0> (ok) [ns_server:debug,2014-08-19T16:49:58.233,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20310.0> (ok) [rebalance:debug,2014-08-19T16:49:58.333,ns_1@10.242.238.90:<0.20756.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:58.333,ns_1@10.242.238.90:<0.20260.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:58.333,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20756.0> (ok) [ns_server:debug,2014-08-19T16:49:58.333,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20260.0> (ok) [rebalance:debug,2014-08-19T16:49:58.459,ns_1@10.242.238.90:<0.20706.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:58.459,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20706.0> (ok) [rebalance:debug,2014-08-19T16:49:58.459,ns_1@10.242.238.90:<0.20196.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:58.459,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20196.0> (ok) [rebalance:debug,2014-08-19T16:49:58.599,ns_1@10.242.238.90:<0.20642.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:58.599,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20642.0> (ok) [rebalance:debug,2014-08-19T16:49:58.599,ns_1@10.242.238.90:<0.20731.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:58.599,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20731.0> (ok) [rebalance:debug,2014-08-19T16:49:58.716,ns_1@10.242.238.90:<0.20576.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:49:58.716,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20576.0> (ok) [rebalance:debug,2014-08-19T16:49:58.716,ns_1@10.242.238.90:<0.20667.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:58.716,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20667.0> (ok) [rebalance:debug,2014-08-19T16:49:58.841,ns_1@10.242.238.90:<0.20601.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:58.841,ns_1@10.242.238.90:<0.20525.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:58.841,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20601.0> (ok) [ns_server:debug,2014-08-19T16:49:58.841,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20525.0> (ok) [rebalance:debug,2014-08-19T16:49:58.863,ns_1@10.242.238.90:<0.21036.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 743 [rebalance:debug,2014-08-19T16:49:58.950,ns_1@10.242.238.90:<0.20551.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:58.950,ns_1@10.242.238.90:<0.20475.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:58.950,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20551.0> (ok) [ns_server:debug,2014-08-19T16:49:58.950,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20475.0> (ok) [rebalance:debug,2014-08-19T16:49:59.042,ns_1@10.242.238.90:<0.20411.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:59.042,ns_1@10.242.238.90:<0.20500.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:59.042,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20411.0> (ok) [ns_server:debug,2014-08-19T16:49:59.042,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20500.0> (ok) [rebalance:debug,2014-08-19T16:49:59.159,ns_1@10.242.238.90:<0.20436.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:59.159,ns_1@10.242.238.90:<0.20360.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:59.159,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20436.0> (ok) [ns_server:debug,2014-08-19T16:49:59.159,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20360.0> (ok) [rebalance:debug,2014-08-19T16:49:59.284,ns_1@10.242.238.90:<0.20386.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:49:59.284,ns_1@10.242.238.90:<0.20304.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:59.285,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20386.0> (ok) [ns_server:debug,2014-08-19T16:49:59.285,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20304.0> (ok) [rebalance:debug,2014-08-19T16:49:59.410,ns_1@10.242.238.90:<0.20240.0>:janitor_agent:handle_call:795]Done 
[rebalance:debug,2014-08-19T16:49:59.410,ns_1@10.242.238.90:<0.20329.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:59.410,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20240.0> (ok) [ns_server:debug,2014-08-19T16:49:59.410,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20329.0> (ok) [rebalance:debug,2014-08-19T16:49:59.551,ns_1@10.242.238.90:<0.20270.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:59.551,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.20270.0> (ok) [rebalance:debug,2014-08-19T16:49:59.601,ns_1@10.242.238.90:<0.21036.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:49:59.601,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21036.0> (ok) [rebalance:debug,2014-08-19T16:50:00.509,ns_1@10.242.238.90:<0.21046.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 744 [rebalance:debug,2014-08-19T16:50:00.509,ns_1@10.242.238.90:<0.21049.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 745 [rebalance:debug,2014-08-19T16:50:00.510,ns_1@10.242.238.90:<0.21046.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:00.510,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21046.0> (ok) [rebalance:debug,2014-08-19T16:50:00.510,ns_1@10.242.238.90:<0.21049.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:00.510,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21049.0> (ok) [rebalance:debug,2014-08-19T16:50:00.633,ns_1@10.242.238.90:<0.21052.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 746 [rebalance:debug,2014-08-19T16:50:00.634,ns_1@10.242.238.90:<0.21055.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 999 [rebalance:debug,2014-08-19T16:50:00.634,ns_1@10.242.238.90:<0.21052.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:00.635,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21052.0> (ok) [rebalance:debug,2014-08-19T16:50:00.635,ns_1@10.242.238.90:<0.21055.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:00.635,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21055.0> (ok) [rebalance:debug,2014-08-19T16:50:00.733,ns_1@10.242.238.90:<0.21058.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 998 [rebalance:debug,2014-08-19T16:50:00.733,ns_1@10.242.238.90:<0.21061.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1001 [rebalance:debug,2014-08-19T16:50:00.734,ns_1@10.242.238.90:<0.21058.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:00.734,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21058.0> (ok) [rebalance:debug,2014-08-19T16:50:00.734,ns_1@10.242.238.90:<0.21061.0>:janitor_agent:handle_call:795]Done 
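Editor's note: each janitor_agent "Going to wait for persistence of checkpoint 1 in vbucket N" entry is later paired, under the same pid, with a "Done" entry, so the two timestamps give the persistence-wait latency. A sketch (not ns_server code; ToMs is an illustrative helper, and the two timestamps are copied from the <0.21036.0> entries for vbucket 743 above) runnable in an Erlang shell:

    %% Elapsed ms between a "Going to wait for persistence" entry and its "Done".
    ToMs = fun(TS) ->                        %% TS like "16:49:58.863"
               [H, M, Rest] = string:tokens(TS, ":"),
               [S, Milli]   = string:tokens(Rest, "."),
               (((list_to_integer(H) * 60 + list_to_integer(M)) * 60
                 + list_to_integer(S)) * 1000) + list_to_integer(Milli)
           end,
    Elapsed = ToMs("16:49:59.601") - ToMs("16:49:58.863").
    %% Elapsed =:= 738, i.e. vbucket 743 waited roughly 0.7 s for checkpoint 1.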
[ns_server:debug,2014-08-19T16:50:00.735,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21061.0> (ok) [rebalance:debug,2014-08-19T16:50:00.833,ns_1@10.242.238.90:<0.21064.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 1000 [rebalance:debug,2014-08-19T16:50:00.835,ns_1@10.242.238.90:<0.21064.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:00.835,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21064.0> (ok) [rebalance:debug,2014-08-19T16:50:01.826,ns_1@10.242.238.90:<0.21073.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 727 [rebalance:debug,2014-08-19T16:50:01.827,ns_1@10.242.238.90:<0.21073.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:01.827,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21073.0> (ok) [rebalance:debug,2014-08-19T16:50:01.935,ns_1@10.242.238.90:<0.21076.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 729 [rebalance:debug,2014-08-19T16:50:01.936,ns_1@10.242.238.90:<0.21076.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:01.936,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21076.0> (ok) [rebalance:debug,2014-08-19T16:50:01.995,ns_1@10.242.238.90:<0.21079.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 731 [rebalance:debug,2014-08-19T16:50:01.997,ns_1@10.242.238.90:<0.21079.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:01.997,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21079.0> (ok) [ns_server:debug,2014-08-19T16:50:02.012,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:02.015,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.015,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3120 us [ns_server:debug,2014-08-19T16:50:02.016,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.016,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{488, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:02.066,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:02.067,ns_1@10.242.238.90:<0.21083.0>:janitor_agent:handle_call:793]Going to wait for 
persistence of checkpoint 1 in vbucket 726 [rebalance:debug,2014-08-19T16:50:02.067,ns_1@10.242.238.90:<0.21084.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 733 [rebalance:debug,2014-08-19T16:50:02.069,ns_1@10.242.238.90:<0.21083.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.069,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21083.0> (ok) [rebalance:debug,2014-08-19T16:50:02.069,ns_1@10.242.238.90:<0.21084.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.069,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21084.0> (ok) [ns_server:debug,2014-08-19T16:50:02.070,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.071,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4032 us [ns_server:debug,2014-08-19T16:50:02.071,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.071,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{490, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:02.162,ns_1@10.242.238.90:<0.21090.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 728 [rebalance:debug,2014-08-19T16:50:02.162,ns_1@10.242.238.90:<0.21091.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 735 [rebalance:debug,2014-08-19T16:50:02.163,ns_1@10.242.238.90:<0.21090.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.163,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21090.0> (ok) [rebalance:debug,2014-08-19T16:50:02.164,ns_1@10.242.238.90:<0.21091.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.164,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21091.0> (ok) [rebalance:debug,2014-08-19T16:50:02.278,ns_1@10.242.238.90:<0.21096.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 730 [rebalance:debug,2014-08-19T16:50:02.279,ns_1@10.242.238.90:<0.21099.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 737 [rebalance:debug,2014-08-19T16:50:02.280,ns_1@10.242.238.90:<0.21096.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.280,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21096.0> (ok) [rebalance:debug,2014-08-19T16:50:02.280,ns_1@10.242.238.90:<0.21099.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:50:02.280,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21099.0> (ok) [rebalance:debug,2014-08-19T16:50:02.396,ns_1@10.242.238.90:<0.21102.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 739 [rebalance:debug,2014-08-19T16:50:02.396,ns_1@10.242.238.90:<0.21105.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 732 [rebalance:debug,2014-08-19T16:50:02.397,ns_1@10.242.238.90:<0.21102.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:02.397,ns_1@10.242.238.90:<0.21105.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.397,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21102.0> (ok) [ns_server:debug,2014-08-19T16:50:02.397,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21105.0> (ok) [rebalance:debug,2014-08-19T16:50:02.496,ns_1@10.242.238.90:<0.21108.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 734 [rebalance:debug,2014-08-19T16:50:02.497,ns_1@10.242.238.90:<0.21111.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 741 [rebalance:debug,2014-08-19T16:50:02.498,ns_1@10.242.238.90:<0.21108.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.498,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21108.0> (ok) [rebalance:debug,2014-08-19T16:50:02.498,ns_1@10.242.238.90:<0.21111.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.498,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21111.0> (ok) [ns_server:debug,2014-08-19T16:50:02.611,ns_1@10.242.238.90:<0.21115.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 743) [ns_server:debug,2014-08-19T16:50:02.611,ns_1@10.242.238.90:<0.21115.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:02.611,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21114.0> (ok) [rebalance:debug,2014-08-19T16:50:02.612,ns_1@10.242.238.90:<0.20243.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:02.612,ns_1@10.242.238.90:<0.20243.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:02.612,ns_1@10.242.238.90:<0.21116.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:02.612,ns_1@10.242.238.90:<0.21116.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:02.612,ns_1@10.242.238.90:<0.20243.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:50:02.613,ns_1@10.242.238.90:<0.21117.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 736 [rebalance:debug,2014-08-19T16:50:02.614,ns_1@10.242.238.90:<0.21117.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.614,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21117.0> (ok) [ns_server:info,2014-08-19T16:50:02.659,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 743 state to active [ns_server:debug,2014-08-19T16:50:02.661,ns_1@10.242.238.90:<0.21121.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 745) [ns_server:debug,2014-08-19T16:50:02.661,ns_1@10.242.238.90:<0.21121.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:02.661,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21120.0> (ok) [rebalance:debug,2014-08-19T16:50:02.662,ns_1@10.242.238.90:<0.20193.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:02.662,ns_1@10.242.238.90:<0.20193.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:02.662,ns_1@10.242.238.90:<0.21122.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:02.662,ns_1@10.242.238.90:<0.21122.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:02.662,ns_1@10.242.238.90:<0.20193.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
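Between the persistence waits, ns_memcached reports transitions such as `Changed vbucket 743 state to active`, which mc_couch_events later echoes as a `set_vbucket` event (`Updated state: active (1)`). A small illustrative fold, again not part of the Couchbase code base, that reduces those lines to this node's current vbucket-state map:

```python
import re

# Matches lines such as
#   "...ns_memcached:do_handle_call:527]Changed vbucket 743 state to active"
STATE_RE = re.compile(r"Changed vbucket (\d+) state to (\w+)")

def vbucket_states(lines):
    states = {}
    for line in lines:
        m = STATE_RE.search(line)
        if m:
            states[int(m.group(1))] = m.group(2)
    return states

# In this excerpt vbuckets 743, 744, 745 and 746 become "active" on
# ns_1@10.242.238.90, while 999 and 1001 become "replica".
```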
[rebalance:debug,2014-08-19T16:50:02.662,ns_1@10.242.238.90:<0.21123.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 738 [ns_server:debug,2014-08-19T16:50:02.683,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:02.686,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3279 us [ns_server:debug,2014-08-19T16:50:02.686,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.687,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.687,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{743, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:02.709,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 745 state to active [rebalance:debug,2014-08-19T16:50:02.714,ns_1@10.242.238.90:<0.21127.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 981 [rebalance:debug,2014-08-19T16:50:02.714,ns_1@10.242.238.90:<0.21130.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 740 [views:debug,2014-08-19T16:50:02.726,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/743. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:02.726,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",743,active,1} [rebalance:debug,2014-08-19T16:50:02.727,ns_1@10.242.238.90:<0.21123.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.727,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21123.0> (ok) [rebalance:debug,2014-08-19T16:50:02.727,ns_1@10.242.238.90:<0.21127.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.727,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21127.0> (ok) [ns_server:debug,2014-08-19T16:50:02.732,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:02.733,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.734,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1523 us [ns_server:debug,2014-08-19T16:50:02.734,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.734,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{745, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:02.760,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/745. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:02.760,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",745,active,1} [rebalance:debug,2014-08-19T16:50:02.761,ns_1@10.242.238.90:<0.21130.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.761,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21130.0> (ok) [rebalance:debug,2014-08-19T16:50:02.784,ns_1@10.242.238.90:<0.21135.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 742 [rebalance:debug,2014-08-19T16:50:02.785,ns_1@10.242.238.90:<0.21138.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 983 [rebalance:debug,2014-08-19T16:50:02.785,ns_1@10.242.238.90:<0.21135.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.785,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21135.0> (ok) [rebalance:debug,2014-08-19T16:50:02.786,ns_1@10.242.238.90:<0.21138.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.786,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21138.0> (ok) [ns_server:debug,2014-08-19T16:50:02.865,ns_1@10.242.238.90:<0.21142.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 744) [ns_server:debug,2014-08-19T16:50:02.865,ns_1@10.242.238.90:<0.21142.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:02.866,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21141.0> (ok) [rebalance:debug,2014-08-19T16:50:02.866,ns_1@10.242.238.90:<0.20218.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:02.866,ns_1@10.242.238.90:<0.20218.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:02.866,ns_1@10.242.238.90:<0.21143.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:02.867,ns_1@10.242.238.90:<0.21143.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:02.867,ns_1@10.242.238.90:<0.20218.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
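Each `config change: buckets ->` dump above carries a one-element map delta such as `{743, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}`. Reading the second and third elements as the vbucket's previous and new server chains is an inference from the surrounding rebalance activity (743 has just become active on ns_1@10.242.238.90), not a documented format; under that assumption a single delta can be described like this:

```python
# Hypothetical illustration of one map delta from the "config change: buckets ->"
# dumps, read as {vbucket, old_chain, new_chain}. The before/after-chain reading
# is an assumption inferred from this log, not a documented format.
move = {
    "vbucket": 743,
    "old_chain": ["ns_1@10.242.238.88", None],                 # master .88, no replica yet
    "new_chain": ["ns_1@10.242.238.90", "ns_1@10.242.238.91"]  # master .90, replica .91
}

def describe_move(m):
    old_master, new_master = m["old_chain"][0], m["new_chain"][0]
    if old_master != new_master:
        return f"vbucket {m['vbucket']}: master moves {old_master} -> {new_master}"
    return f"vbucket {m['vbucket']}: replica-only change"

print(describe_move(move))
# vbucket 743: master moves ns_1@10.242.238.88 -> ns_1@10.242.238.90
```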
[rebalance:debug,2014-08-19T16:50:02.867,ns_1@10.242.238.90:<0.21144.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 985 [rebalance:debug,2014-08-19T16:50:02.869,ns_1@10.242.238.90:<0.21144.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.869,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21144.0> (ok) [ns_server:debug,2014-08-19T16:50:02.899,ns_1@10.242.238.90:<0.21148.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 746) [ns_server:debug,2014-08-19T16:50:02.899,ns_1@10.242.238.90:<0.21148.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:02.899,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21147.0> (ok) [rebalance:debug,2014-08-19T16:50:02.900,ns_1@10.242.238.90:<0.20168.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:02.900,ns_1@10.242.238.90:<0.20168.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:02.900,ns_1@10.242.238.90:<0.21149.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:02.900,ns_1@10.242.238.90:<0.21149.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:02.900,ns_1@10.242.238.90:<0.20168.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:50:02.901,ns_1@10.242.238.90:<0.21150.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 987 [rebalance:debug,2014-08-19T16:50:02.902,ns_1@10.242.238.90:<0.21150.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.903,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21150.0> (ok) [ns_server:info,2014-08-19T16:50:02.914,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 744 state to active [ns_server:debug,2014-08-19T16:50:02.938,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:02.945,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.946,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6390 us [ns_server:debug,2014-08-19T16:50:02.946,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.946,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{744, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, 
{map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:02.949,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 746 state to active [ns_server:debug,2014-08-19T16:50:02.968,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:02.971,ns_1@10.242.238.90:<0.21154.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 980 [rebalance:debug,2014-08-19T16:50:02.971,ns_1@10.242.238.90:<0.21155.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 989 [ns_server:debug,2014-08-19T16:50:02.974,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.974,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5860 us [ns_server:debug,2014-08-19T16:50:02.974,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.975,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{746, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:02.993,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/744. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:02.993,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",744,active,1} [rebalance:debug,2014-08-19T16:50:02.994,ns_1@10.242.238.90:<0.21154.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:02.994,ns_1@10.242.238.90:<0.21155.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:02.994,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21154.0> (ok) [ns_server:debug,2014-08-19T16:50:02.994,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21155.0> (ok) [views:debug,2014-08-19T16:50:03.052,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/746. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:03.052,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",746,active,1} [rebalance:debug,2014-08-19T16:50:03.078,ns_1@10.242.238.90:<0.21161.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 991 [rebalance:debug,2014-08-19T16:50:03.078,ns_1@10.242.238.90:<0.21164.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 982 [rebalance:debug,2014-08-19T16:50:03.079,ns_1@10.242.238.90:<0.21164.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:03.079,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21164.0> (ok) [rebalance:debug,2014-08-19T16:50:03.079,ns_1@10.242.238.90:<0.21161.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:03.079,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21161.0> (ok) [rebalance:debug,2014-08-19T16:50:03.145,ns_1@10.242.238.90:<0.21167.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 993 [rebalance:debug,2014-08-19T16:50:03.145,ns_1@10.242.238.90:<0.21170.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 984 [rebalance:debug,2014-08-19T16:50:03.146,ns_1@10.242.238.90:<0.21170.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:03.146,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21170.0> (ok) [rebalance:debug,2014-08-19T16:50:03.146,ns_1@10.242.238.90:<0.21167.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:03.146,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21167.0> (ok) [rebalance:debug,2014-08-19T16:50:03.253,ns_1@10.242.238.90:<0.21173.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 986 [rebalance:debug,2014-08-19T16:50:03.254,ns_1@10.242.238.90:<0.21176.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 995 [rebalance:debug,2014-08-19T16:50:03.254,ns_1@10.242.238.90:<0.21173.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:03.254,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21173.0> (ok) [rebalance:debug,2014-08-19T16:50:03.255,ns_1@10.242.238.90:<0.21176.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:03.255,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21176.0> (ok) [rebalance:debug,2014-08-19T16:50:03.337,ns_1@10.242.238.90:<0.21183.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 997 [rebalance:debug,2014-08-19T16:50:03.338,ns_1@10.242.238.90:<0.21186.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 988 [rebalance:debug,2014-08-19T16:50:03.339,ns_1@10.242.238.90:<0.21183.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:03.339,ns_1@10.242.238.90:<0.21186.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:03.339,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from 
subprocess: <0.21183.0> (ok) [ns_server:debug,2014-08-19T16:50:03.339,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21186.0> (ok) [rebalance:debug,2014-08-19T16:50:03.430,ns_1@10.242.238.90:<0.21204.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 990 [rebalance:debug,2014-08-19T16:50:03.432,ns_1@10.242.238.90:<0.21204.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:03.432,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21204.0> (ok) [rebalance:debug,2014-08-19T16:50:03.477,ns_1@10.242.238.90:<0.20199.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.477,ns_1@10.242.238.90:<0.20199.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.477,ns_1@10.242.238.90:<0.21208.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.478,ns_1@10.242.238.90:<0.21208.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:03.478,ns_1@10.242.238.90:<0.20199.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:50:03.480,ns_1@10.242.238.90:<0.21209.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 992 [rebalance:debug,2014-08-19T16:50:03.481,ns_1@10.242.238.90:<0.21209.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:50:03.481,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 999 state to replica [ns_server:info,2014-08-19T16:50:03.481,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [999,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023] ([999], []) [ns_server:debug,2014-08-19T16:50:03.482,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:debug,2014-08-19T16:50:03.482,ns_1@10.242.238.90:<0.21212.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [999,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.232944>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[999,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:03.483,ns_1@10.242.238.90:<0.21212.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.20145.0> [ns_server:info,2014-08-19T16:50:03.483,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:03.484,ns_1@10.242.238.90:<0.21214.0>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning 
up indexes for bucket `default` [ns_server:info,2014-08-19T16:50:03.485,ns_1@10.242.238.90:<0.21214.0>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2014-08-19T16:50:03.495,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{999,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:03.495,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:03.495,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:03.495,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:03.495,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:03.495,ns_1@10.242.238.90:<0.21217.0>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 6624, disk size is 997536 [ns_server:debug,2014-08-19T16:50:03.495,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:03.495,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:50:03.496,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.496,ns_1@10.242.238.90:<0.21218.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.496,ns_1@10.242.238.90:<0.21218.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:03.496,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.496,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:03.496,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:50:03.496,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:03.496,ns_1@10.242.238.90:<0.20145.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:03.496,ns_1@10.242.238.90:<0.21212.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.20145.0> [ns_server:debug,2014-08-19T16:50:03.497,ns_1@10.242.238.90:<0.21212.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:03.497,ns_1@10.242.238.90:<0.21220.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:03.497,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.20145.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.20146.0>,<<"cut off">>,<<"cut off">>,[],70,false,false,0, {1408,452603,495872}, completed, {<0.21212.0>,#Ref<0.0.0.232961>}, <<"replication_ns_1@10.242.238.90">>,<0.20145.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:03.497,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21212.0>,{#Ref<0.0.0.232946>,<0.21220.0>}} [ns_server:debug,2014-08-19T16:50:03.497,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21209.0> (ok) [error_logger:info,2014-08-19T16:50:03.497,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21220.0>}, {name, {new_child_id, [999,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [999,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:03.504,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[999,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:03.504,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21221.0> [ns_server:debug,2014-08-19T16:50:03.505,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:03.509,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.509,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3718 us [ns_server:debug,2014-08-19T16:50:03.510,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.510,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{999, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:03.528,ns_1@10.242.238.90:<0.20149.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.529,ns_1@10.242.238.90:<0.20149.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.529,ns_1@10.242.238.90:<0.21223.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.529,ns_1@10.242.238.90:<0.21223.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:03.529,ns_1@10.242.238.90:<0.20149.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:50:03.530,ns_1@10.242.238.90:<0.21224.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 994 [rebalance:debug,2014-08-19T16:50:03.532,ns_1@10.242.238.90:<0.21224.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:03.532,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21224.0> (ok) [ns_server:info,2014-08-19T16:50:03.532,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 1001 state to replica [ns_server:info,2014-08-19T16:50:03.532,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [999,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023] ([1001], []) [ns_server:debug,2014-08-19T16:50:03.535,ns_1@10.242.238.90:<0.21227.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [999,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.233255>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[999,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:03.535,ns_1@10.242.238.90:<0.21227.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21220.0> [ns_server:info,2014-08-19T16:50:03.535,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:03.541,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{999,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, 
{1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:03.542,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:03.542,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:03.542,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:03.542,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:03.542,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:03.543,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.543,ns_1@10.242.238.90:<0.21229.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.543,ns_1@10.242.238.90:<0.21229.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:03.543,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.543,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:03.543,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:03.543,ns_1@10.242.238.90:<0.21220.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:03.543,ns_1@10.242.238.90:<0.21227.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21220.0> [ns_server:debug,2014-08-19T16:50:03.544,ns_1@10.242.238.90:<0.21227.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:03.544,ns_1@10.242.238.90:<0.21231.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:03.544,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21220.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21221.0>,<<"cut off">>,<<"cut off">>,[],73,false,false,0, {1408,452603,542603}, completed, {<0.21227.0>,#Ref<0.0.0.233268>}, <<"replication_ns_1@10.242.238.90">>,<0.21220.0>, {had_backfill,false,undefined,[]}, completed,false}. 
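A few lines up, compaction_daemon reports `default` data size 6624 against disk size 997536 under a `database_fragmentation_threshold` of {30,undefined}, then finishes its iteration almost immediately and reschedules the next run in 30s. A rough sketch of the arithmetic such a threshold implies, assuming fragmentation is measured as (disk − data)/disk; the `min_disk_size` guard below is an assumption added purely for illustration (the real compaction_daemon applies its own additional checks) and is not taken from this log:

```python
def needs_compaction(data_size, disk_size, threshold_pct=30, min_disk_size=1 << 20):
    """Illustrative only: threshold arithmetic implied by the {30,undefined} setting."""
    if disk_size < min_disk_size:          # assumed guard against compacting tiny files
        return False
    fragmentation = (disk_size - data_size) / disk_size * 100
    return fragmentation >= threshold_pct

print(needs_compaction(6624, 997536))      # False under the assumed minimum-size guard
```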
[ns_server:debug,2014-08-19T16:50:03.544,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21227.0>,{#Ref<0.0.0.233257>,<0.21231.0>}} [error_logger:info,2014-08-19T16:50:03.544,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21231.0>}, {name, {new_child_id, [999,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [999,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:03.549,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:03.550,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[999,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:03.551,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21232.0> [ns_server:debug,2014-08-19T16:50:03.554,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.554,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4660 us [ns_server:debug,2014-08-19T16:50:03.555,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.555,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1001, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:03.689,ns_1@10.242.238.90:<0.21234.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 996 [rebalance:debug,2014-08-19T16:50:03.690,ns_1@10.242.238.90:<0.21234.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:03.690,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21234.0> (ok) [ns_server:debug,2014-08-19T16:50:03.741,ns_1@10.242.238.90:<0.21238.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 727) 
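The two tap_replication_manager entries above change the replication stream from 'ns_1@10.242.238.91' first to [999,1002..1023] with a trailing `([999], [])`, then to [999,1001,1002..1023] with `([1001], [])`. Reading that trailing pair as (vbuckets added, vbuckets removed) is an inference from how the list grows between the two dumps rather than a documented format; a tiny tracker under that assumption:

```python
# Illustrative tracker for the replication filter changes logged above,
# assuming the trailing "([999], [])" pair means (added, removed).
def apply_change(current, added, removed):
    return sorted((set(current) | set(added)) - set(removed))

stream = list(range(1002, 1024)) + [999]     # state after the first filter change
stream = apply_change(stream, added=[1001], removed=[])
assert stream[:3] == [999, 1001, 1002]       # matches the second filter dump above
```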
[ns_server:debug,2014-08-19T16:50:03.741,ns_1@10.242.238.90:<0.21238.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:03.741,ns_1@10.242.238.90:<0.21240.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 729) [ns_server:debug,2014-08-19T16:50:03.741,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21237.0> (ok) [ns_server:debug,2014-08-19T16:50:03.742,ns_1@10.242.238.90:<0.21240.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:03.742,ns_1@10.242.238.90:<0.21244.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 733) [ns_server:debug,2014-08-19T16:50:03.742,ns_1@10.242.238.90:<0.21244.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:03.742,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21239.0> (ok) [ns_server:debug,2014-08-19T16:50:03.742,ns_1@10.242.238.90:<0.21245.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 731) [ns_server:debug,2014-08-19T16:50:03.742,ns_1@10.242.238.90:<0.21245.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:03.742,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21241.0> (ok) [ns_server:debug,2014-08-19T16:50:03.742,ns_1@10.242.238.90:<0.21250.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 735) [ns_server:debug,2014-08-19T16:50:03.742,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21242.0> (ok) [ns_server:debug,2014-08-19T16:50:03.742,ns_1@10.242.238.90:<0.21250.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:03.742,ns_1@10.242.238.90:<0.21254.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 741) [rebalance:debug,2014-08-19T16:50:03.742,ns_1@10.242.238.90:<0.20709.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.742,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21243.0> (ok) [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.21254.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.21255.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 728) [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21249.0> (ok) [rebalance:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.20645.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.21255.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.20528.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.20709.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack 
[ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.21257.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.21259.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 737) [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.20645.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.20579.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21248.0> (ok) [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.21258.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.21257.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.20478.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.21259.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.20528.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.21258.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.20709.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.21261.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.20307.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.21262.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 726) [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.20478.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.21261.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:03.743,ns_1@10.242.238.90:<0.21264.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21247.0> (ok) [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.20579.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21265.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21262.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:info,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.20645.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21266.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 739) [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21264.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.20307.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21265.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.20684.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.20528.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21246.0> (ok) [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21267.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21266.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:info,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.20478.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21269.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 730) [rebalance:info,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.20579.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21267.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.20684.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21251.0> (ok) [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21270.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21269.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:info,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.20307.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21272.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 732) [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21270.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.20414.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21272.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21252.0> (ok) [ns_server:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.21273.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 734) [rebalance:debug,2014-08-19T16:50:03.744,ns_1@10.242.238.90:<0.20734.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21253.0> (ok) [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.21273.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.20414.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.20684.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.21275.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.21274.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 738) [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.20734.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21256.0> (ok) [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.21276.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.21275.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.21274.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.21276.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.21277.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 736) [rebalance:info,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.20414.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21260.0> (ok) [rebalance:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.20364.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.21277.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.21278.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 740) [rebalance:info,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.20734.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21263.0> (ok) [rebalance:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.20604.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.20364.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.20554.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.21280.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.21279.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 742) [ns_server:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.21278.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.20503.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:03.745,ns_1@10.242.238.90:<0.20389.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21280.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21279.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21268.0> (ok) [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20604.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20554.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21282.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21281.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21271.0> (ok) [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20503.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20389.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20453.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21282.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20364.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21281.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21283.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21284.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20604.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21283.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20554.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21284.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20453.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21285.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20332.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20389.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20503.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21285.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20332.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20453.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21286.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.20282.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.746,ns_1@10.242.238.90:<0.21286.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:03.747,ns_1@10.242.238.90:<0.20332.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.747,ns_1@10.242.238.90:<0.20282.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.747,ns_1@10.242.238.90:<0.21287.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.747,ns_1@10.242.238.90:<0.21287.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:03.747,ns_1@10.242.238.90:<0.20282.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:50:03.904,ns_1@10.242.238.90:<0.20238.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.904,ns_1@10.242.238.90:<0.20238.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.904,ns_1@10.242.238.90:<0.21288.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.904,ns_1@10.242.238.90:<0.21288.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:03.904,ns_1@10.242.238.90:<0.20238.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.918,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:03.921,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.921,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3337 us [ns_server:debug,2014-08-19T16:50:03.921,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.922,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{487, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:03.928,ns_1@10.242.238.90:<0.20174.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:03.929,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 998 state to replica [ns_server:debug,2014-08-19T16:50:03.929,ns_1@10.242.238.90:<0.20174.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.929,ns_1@10.242.238.90:<0.21290.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.929,ns_1@10.242.238.90:<0.21290.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:info,2014-08-19T16:50:03.929,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [998,999,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([998], []) [rebalance:info,2014-08-19T16:50:03.929,ns_1@10.242.238.90:<0.20174.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:03.930,ns_1@10.242.238.90:<0.21291.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [998,999,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.233901>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[998,999,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:03.930,ns_1@10.242.238.90:<0.21291.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21231.0> [ns_server:info,2014-08-19T16:50:03.930,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:03.941,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{998,1}, {999,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:03.941,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:03.942,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:03.942,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:03.942,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:03.942,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:03.942,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:03.942,ns_1@10.242.238.90:<0.21293.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:03.942,ns_1@10.242.238.90:<0.21293.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:03.943,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:03.943,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:03.943,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:03.943,ns_1@10.242.238.90:<0.21231.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:03.943,ns_1@10.242.238.90:<0.21291.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21231.0> [ns_server:debug,2014-08-19T16:50:03.943,ns_1@10.242.238.90:<0.21291.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:03.943,ns_1@10.242.238.90:<0.21295.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:03.943,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21231.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21232.0>,<<"cut off">>,<<"cut off">>,[],76,false,false,0, {1408,452603,942120}, completed, {<0.21291.0>,#Ref<0.0.0.233914>}, <<"replication_ns_1@10.242.238.90">>,<0.21231.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:03.944,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21291.0>,{#Ref<0.0.0.233903>,<0.21295.0>}} [error_logger:info,2014-08-19T16:50:03.944,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21295.0>}, {name, {new_child_id, [998,999,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [998,999,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:03.950,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[998,999,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:debug,2014-08-19T16:50:03.950,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:03.950,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21296.0> [ns_server:debug,2014-08-19T16:50:03.953,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.953,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2791 us [ns_server:debug,2014-08-19T16:50:03.954,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.954,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{998, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, 
{sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:03.974,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:03.982,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7325 us [ns_server:debug,2014-08-19T16:50:03.982,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.983,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.983,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{489, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.000,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.003,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.003,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2967 us [ns_server:debug,2014-08-19T16:50:04.003,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.004,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{471, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:04.017,ns_1@10.242.238.90:<0.20327.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.018,ns_1@10.242.238.90:<0.20327.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.018,ns_1@10.242.238.90:<0.21299.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.018,ns_1@10.242.238.90:<0.21299.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent 
[rebalance:info,2014-08-19T16:50:04.018,ns_1@10.242.238.90:<0.20327.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.018,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.021,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2484 us [ns_server:debug,2014-08-19T16:50:04.021,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.021,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.022,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{475, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.025,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 1000 state to replica [ns_server:info,2014-08-19T16:50:04.025,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([1000], []) [ns_server:debug,2014-08-19T16:50:04.026,ns_1@10.242.238.90:<0.21301.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.234123>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.027,ns_1@10.242.238.90:<0.21301.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21295.0> [ns_server:info,2014-08-19T16:50:04.027,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.034,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] 
[rebalance:debug,2014-08-19T16:50:04.034,ns_1@10.242.238.90:<0.20549.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.034,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:debug,2014-08-19T16:50:04.034,ns_1@10.242.238.90:<0.20549.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.035,ns_1@10.242.238.90:<0.21303.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.035,ns_1@10.242.238.90:<0.21303.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:info,2014-08-19T16:50:04.035,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.035,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.035,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [rebalance:info,2014-08-19T16:50:04.035,ns_1@10.242.238.90:<0.20549.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.035,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.035,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.035,ns_1@10.242.238.90:<0.21304.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.035,ns_1@10.242.238.90:<0.21304.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.035,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.035,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.036,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.036,ns_1@10.242.238.90:<0.21295.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.036,ns_1@10.242.238.90:<0.21301.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21295.0> [ns_server:debug,2014-08-19T16:50:04.038,ns_1@10.242.238.90:<0.21301.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.038,ns_1@10.242.238.90:<0.21306.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.039,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21295.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21296.0>,<<"cut off">>,<<"cut off">>,[],79,false,false,0, {1408,452604,35170}, completed, {<0.21301.0>,#Ref<0.0.0.234137>}, <<"replication_ns_1@10.242.238.90">>,<0.21295.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:04.039,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21301.0>,{#Ref<0.0.0.234125>,<0.21306.0>}} [error_logger:info,2014-08-19T16:50:04.039,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21306.0>}, {name, {new_child_id, [998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:04.045,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.047,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.047,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21307.0> [ns_server:debug,2014-08-19T16:50:04.048,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.048,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3024 us [ns_server:debug,2014-08-19T16:50:04.049,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:04.049,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 740 state to active [ns_server:debug,2014-08-19T16:50:04.049,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1000, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
[ns_server:debug,2014-08-19T16:50:04.065,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:04.067,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 741 state to active [ns_server:debug,2014-08-19T16:50:04.068,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2828 us [ns_server:debug,2014-08-19T16:50:04.068,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.069,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{470, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.069,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.085,ns_1@10.242.238.90:<0.20729.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.086,ns_1@10.242.238.90:<0.20729.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.086,ns_1@10.242.238.90:<0.21309.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.086,ns_1@10.242.238.90:<0.21309.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.086,ns_1@10.242.238.90:<0.20729.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:04.088,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.090,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.091,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2848 us [ns_server:debug,2014-08-19T16:50:04.091,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.092,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{479, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:04.108,ns_1@10.242.238.90:<0.20395.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.108,ns_1@10.242.238.90:<0.20395.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.108,ns_1@10.242.238.90:<0.21311.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.108,ns_1@10.242.238.90:<0.21311.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.109,ns_1@10.242.238.90:<0.20395.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:04.111,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.117,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5471 us [ns_server:debug,2014-08-19T16:50:04.117,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.117,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.118,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{473, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:04.120,ns_1@10.242.238.90:<0.20754.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.120,ns_1@10.242.238.90:<0.20754.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.120,ns_1@10.242.238.90:<0.21312.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.121,ns_1@10.242.238.90:<0.21312.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.121,ns_1@10.242.238.90:<0.20754.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:50:04.127,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/740. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:04.127,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",740,active,1} [rebalance:debug,2014-08-19T16:50:04.133,ns_1@10.242.238.90:<0.20523.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.134,ns_1@10.242.238.90:<0.20523.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.134,ns_1@10.242.238.90:<0.21314.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.134,ns_1@10.242.238.90:<0.21314.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:04.134,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:info,2014-08-19T16:50:04.134,ns_1@10.242.238.90:<0.20523.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:04.137,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.137,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3071 us [ns_server:debug,2014-08-19T16:50:04.137,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.138,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{477, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.140,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 995 state to replica [ns_server:info,2014-08-19T16:50:04.140,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [995,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([995], []) [ns_server:debug,2014-08-19T16:50:04.141,ns_1@10.242.238.90:<0.21315.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [995,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.234494>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[995,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.141,ns_1@10.242.238.90:<0.21315.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21306.0> [ns_server:info,2014-08-19T16:50:04.141,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:50:04.150,ns_1@10.242.238.90:<0.20665.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.150,ns_1@10.242.238.90:<0.20665.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.151,ns_1@10.242.238.90:<0.21317.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.151,ns_1@10.242.238.90:<0.21317.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.151,ns_1@10.242.238.90:<0.20665.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:50:04.152,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{995,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.153,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.153,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.153,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.153,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.153,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.154,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.154,ns_1@10.242.238.90:<0.21318.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.154,ns_1@10.242.238.90:<0.21318.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.154,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.154,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.154,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.154,ns_1@10.242.238.90:<0.21306.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.154,ns_1@10.242.238.90:<0.21315.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21306.0> [ns_server:debug,2014-08-19T16:50:04.155,ns_1@10.242.238.90:<0.21315.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.155,ns_1@10.242.238.90:<0.21320.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.155,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21306.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21307.0>,<<"cut off">>,<<"cut off">>,[],82,false,false,0, {1408,452604,153660}, completed, {<0.21315.0>,#Ref<0.0.0.234507>}, <<"replication_ns_1@10.242.238.90">>,<0.21306.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:04.155,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21315.0>,{#Ref<0.0.0.234496>,<0.21320.0>}} [error_logger:info,2014-08-19T16:50:04.155,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21320.0>}, {name, {new_child_id, [995,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [995,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:04.161,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.163,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[995,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.163,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21321.0> [ns_server:debug,2014-08-19T16:50:04.164,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 437 us [ns_server:debug,2014-08-19T16:50:04.164,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.164,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.165,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{995, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.167,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 987 state to replica [ns_server:info,2014-08-19T16:50:04.167,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [987,995,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([987], []) [rebalance:debug,2014-08-19T16:50:04.172,ns_1@10.242.238.90:<0.20704.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
[ns_server:debug,2014-08-19T16:50:04.172,ns_1@10.242.238.90:<0.20704.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.172,ns_1@10.242.238.90:<0.21325.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.172,ns_1@10.242.238.90:<0.21325.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:04.172,ns_1@10.242.238.90:<0.21323.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [987,995,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.234648>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[987,995,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [rebalance:info,2014-08-19T16:50:04.173,ns_1@10.242.238.90:<0.20704.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.173,ns_1@10.242.238.90:<0.21323.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21320.0> [ns_server:info,2014-08-19T16:50:04.173,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.182,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{987,1}, {995,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.182,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.183,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.183,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:50:04.183,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.183,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.183,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.183,ns_1@10.242.238.90:<0.21326.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.183,ns_1@10.242.238.90:<0.21326.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.183,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.184,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.184,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.184,ns_1@10.242.238.90:<0.21320.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.184,ns_1@10.242.238.90:<0.21323.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21320.0> [ns_server:debug,2014-08-19T16:50:04.184,ns_1@10.242.238.90:<0.21323.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.184,ns_1@10.242.238.90:<0.21328.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.184,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21320.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21321.0>,<<"cut off">>,<<"cut off">>,[],85,false,false,0, {1408,452604,183011}, completed, {<0.21323.0>,#Ref<0.0.0.234676>}, <<"replication_ns_1@10.242.238.90">>,<0.21320.0>, {had_backfill,false,undefined,[]}, completed,false}. 
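The sequence above (the old ebucketmigrator confirms downstream reception, passes its state to the caller and prepares to die, while the replacement retrieves that state and reuses the old upstream) is the new-style vbucket filter change handover. A rough Python sketch of that pattern, under the assumption that it amounts to a blocking state handoff between an exiting worker and its replacement; this is an illustration, not the Erlang implementation:

```python
import queue
import threading

# Rough sketch of the handover visible above, not the ns_server code: the
# replacement worker waits for the old worker's state, merges in the newly
# added vbuckets, and resumes on the same upstream connection.
def old_worker(handoff, state):
    # "Passed old state to caller" / "Sent out state. Preparing to die"
    handoff.put(state)

def new_worker(handoff, extra_vbuckets):
    state = handoff.get()              # "Got old ebucketmigrator state from ..."
    state["vbuckets"] = sorted(set(state["vbuckets"]) | set(extra_vbuckets))
    return state                       # "Reusing old upstream: ..."

handoff = queue.Queue(maxsize=1)
threading.Thread(target=old_worker,
                 args=(handoff, {"vbuckets": [995, 998, 999]})).start()
print(new_worker(handoff, [987])["vbuckets"])   # [987, 995, 998, 999]
```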
[ns_server:debug,2014-08-19T16:50:04.185,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21323.0>,{#Ref<0.0.0.234651>,<0.21328.0>}} [error_logger:info,2014-08-19T16:50:04.185,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21328.0>}, {name, {new_child_id, [987,995,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [987,995,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:50:04.189,ns_1@10.242.238.90:<0.20434.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.189,ns_1@10.242.238.90:<0.20434.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.189,ns_1@10.242.238.90:<0.21329.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.190,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.190,ns_1@10.242.238.90:<0.21329.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.190,ns_1@10.242.238.90:<0.20434.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:04.191,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[987,995,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.191,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21330.0> [ns_server:debug,2014-08-19T16:50:04.194,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.194,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4005 us [ns_server:debug,2014-08-19T16:50:04.194,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.195,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{987, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:04.208,ns_1@10.242.238.90:<0.20638.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.208,ns_1@10.242.238.90:<0.20638.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.208,ns_1@10.242.238.90:<0.21332.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.208,ns_1@10.242.238.90:<0.21332.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.208,ns_1@10.242.238.90:<0.20638.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:50:04.210,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/741. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:04.210,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",741,active,1} [ns_server:debug,2014-08-19T16:50:04.212,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.214,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 908 us [ns_server:debug,2014-08-19T16:50:04.214,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.214,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.215,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{740, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:04.221,ns_1@10.242.238.90:<0.20498.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.221,ns_1@10.242.238.90:<0.20498.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.221,ns_1@10.242.238.90:<0.21333.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.221,ns_1@10.242.238.90:<0.21333.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.222,ns_1@10.242.238.90:<0.20498.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.235,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:04.238,ns_1@10.242.238.90:<0.20370.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.238,ns_1@10.242.238.90:<0.20370.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.238,ns_1@10.242.238.90:<0.21335.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.239,ns_1@10.242.238.90:<0.21335.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.239,ns_1@10.242.238.90:<0.20370.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
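Each "config change: buckets" entry above carries a per-vbucket map delta such as {987, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}; reading the two lists as the chain before and after the move is an assumption drawn from the surrounding rebalance context. A sketch for folding such deltas into a vbucket-to-chain table; apply_map_delta is a hypothetical helper:

```python
# Sketch: collect the per-vbucket chain changes reported in the
# "config change: buckets" entries above. Interpreting each map entry
# {VBucket, OldChain, NewChain} as the chain before and after the move
# is an assumption, not a documented format.
def apply_map_delta(current_map, delta):
    """current_map: dict vbucket -> [master, replica, ...]."""
    for vbucket, _old_chain, new_chain in delta:
        current_map[vbucket] = new_chain

vb_map = {}
apply_map_delta(vb_map, [(987, ["ns_1@10.242.238.88", None],
                               ["ns_1@10.242.238.91", "ns_1@10.242.238.90"])])
print(vb_map[987])  # ['ns_1@10.242.238.91', 'ns_1@10.242.238.90']
```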
[ns_server:debug,2014-08-19T16:50:04.239,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.239,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1896 us [ns_server:debug,2014-08-19T16:50:04.239,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.240,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{741, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.242,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 981 state to replica [ns_server:info,2014-08-19T16:50:04.242,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [981,987,995,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([981], []) [ns_server:debug,2014-08-19T16:50:04.243,ns_1@10.242.238.90:<0.21336.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [981,987,995,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.234929>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[981,987,995,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.243,ns_1@10.242.238.90:<0.21336.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21328.0> [ns_server:info,2014-08-19T16:50:04.243,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.249,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{981,1}, {987,1}, {995,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.250,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
[ns_server:info,2014-08-19T16:50:04.250,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.250,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.250,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.250,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.250,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.251,ns_1@10.242.238.90:<0.21338.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.251,ns_1@10.242.238.90:<0.21338.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.251,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.251,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.251,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.251,ns_1@10.242.238.90:<0.21328.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.251,ns_1@10.242.238.90:<0.21336.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21328.0> [ns_server:debug,2014-08-19T16:50:04.252,ns_1@10.242.238.90:<0.21336.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.252,ns_1@10.242.238.90:<0.21340.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.252,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21328.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21330.0>,<<"cut off">>,<<"cut off">>,[],88,false,false,0, {1408,452604,250507}, completed, {<0.21336.0>,#Ref<0.0.0.234942>}, <<"replication_ns_1@10.242.238.90">>,<0.21328.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:04.252,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21336.0>,{#Ref<0.0.0.234931>,<0.21340.0>}} [rebalance:debug,2014-08-19T16:50:04.252,ns_1@10.242.238.90:<0.20263.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [error_logger:info,2014-08-19T16:50:04.252,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21340.0>}, {name, {new_child_id, [981,987,995,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [981,987,995,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:04.253,ns_1@10.242.238.90:<0.20263.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.253,ns_1@10.242.238.90:<0.21341.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.253,ns_1@10.242.238.90:<0.21341.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.253,ns_1@10.242.238.90:<0.20263.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:04.257,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.259,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[981,987,995,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.260,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21342.0> [ns_server:debug,2014-08-19T16:50:04.265,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7294 us [ns_server:debug,2014-08-19T16:50:04.265,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.265,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.266,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{981, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.267,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 992 state to replica [ns_server:info,2014-08-19T16:50:04.268,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [981,987,992,995,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([992], []) [ns_server:debug,2014-08-19T16:50:04.269,ns_1@10.242.238.90:<0.21344.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [981,987,992,995,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.235088>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[981,987,992,995,998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.269,ns_1@10.242.238.90:<0.21344.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21340.0> [ns_server:info,2014-08-19T16:50:04.270,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:50:04.270,ns_1@10.242.238.90:<0.20288.0>:ebucketmigrator_srv:terminate:737]Dying with 
reason: shutdown [ns_server:debug,2014-08-19T16:50:04.270,ns_1@10.242.238.90:<0.20288.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.270,ns_1@10.242.238.90:<0.21346.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.270,ns_1@10.242.238.90:<0.21346.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.271,ns_1@10.242.238.90:<0.20288.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:04.276,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{981,1}, {987,1}, {992,1}, {995,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.276,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.277,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.277,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.277,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.277,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.277,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.277,ns_1@10.242.238.90:<0.21347.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.277,ns_1@10.242.238.90:<0.21347.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.278,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.278,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.278,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.278,ns_1@10.242.238.90:<0.21340.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:04.278,ns_1@10.242.238.90:<0.21344.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21340.0> [ns_server:debug,2014-08-19T16:50:04.278,ns_1@10.242.238.90:<0.21344.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.278,ns_1@10.242.238.90:<0.21349.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.279,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21340.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21342.0>,<<"cut off">>,<<"cut off">>,[],91,false,false,0, {1408,452604,277112}, completed, {<0.21344.0>,#Ref<0.0.0.235101>}, <<"replication_ns_1@10.242.238.90">>,<0.21340.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:04.279,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21344.0>,{#Ref<0.0.0.235090>,<0.21349.0>}} [error_logger:info,2014-08-19T16:50:04.279,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21349.0>}, {name, {new_child_id, [981,987,992,995,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [981,987,992,995,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:04.284,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.285,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[981,987,992,995,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.285,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21350.0> [ns_server:debug,2014-08-19T16:50:04.288,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3016 us [ns_server:debug,2014-08-19T16:50:04.288,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.288,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.289,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{992, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, 
{fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:04.290,ns_1@10.242.238.90:<0.20338.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.290,ns_1@10.242.238.90:<0.20338.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.290,ns_1@10.242.238.90:<0.21351.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.290,ns_1@10.242.238.90:<0.21351.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.290,ns_1@10.242.238.90:<0.20338.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:04.290,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 980 state to replica [ns_server:info,2014-08-19T16:50:04.290,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [980,981,987,992,995,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([980], []) [ns_server:debug,2014-08-19T16:50:04.291,ns_1@10.242.238.90:<0.21352.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [980,981,987,992,995,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.235252>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,987,992,995,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.292,ns_1@10.242.238.90:<0.21352.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21349.0> [ns_server:info,2014-08-19T16:50:04.292,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.298,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {987,1}, {992,1}, {995,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.298,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
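The "Changing vbucket filter on tap stream" entries list {VBucket, N} pairs with N = 1 throughout; treating the second element as a starting checkpoint id is an assumption. A short sketch of how such a filter list could be assembled; build_tap_filter is illustrative only:

```python
# Sketch: pair each vbucket in the filter with a number that is 1 in every
# entry of this excerpt (assumed here to be a starting checkpoint id).
def build_tap_filter(vbuckets, checkpoint_id=1):
    return [(vb, checkpoint_id) for vb in sorted(vbuckets)]

print(build_tap_filter([995, 987, 998]))  # [(987, 1), (995, 1), (998, 1)]
```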
[ns_server:info,2014-08-19T16:50:04.299,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.299,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.299,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.299,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.299,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.299,ns_1@10.242.238.90:<0.21355.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.299,ns_1@10.242.238.90:<0.21355.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.299,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.299,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.300,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.300,ns_1@10.242.238.90:<0.21349.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.300,ns_1@10.242.238.90:<0.21352.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21349.0> [ns_server:debug,2014-08-19T16:50:04.300,ns_1@10.242.238.90:<0.21352.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.300,ns_1@10.242.238.90:<0.21357.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.300,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21349.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21350.0>,<<"cut off">>,<<"cut off">>,[],94,false,false,0, {1408,452604,299107}, completed, {<0.21352.0>,#Ref<0.0.0.235265>}, <<"replication_ns_1@10.242.238.90">>,<0.21349.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:04.300,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21352.0>,{#Ref<0.0.0.235254>,<0.21357.0>}} [error_logger:info,2014-08-19T16:50:04.300,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21357.0>}, {name, {new_child_id, [980,981,987,992,995,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,987,992,995,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:50:04.304,ns_1@10.242.238.90:<0.20473.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.304,ns_1@10.242.238.90:<0.20473.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.304,ns_1@10.242.238.90:<0.21358.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.304,ns_1@10.242.238.90:<0.21358.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.304,ns_1@10.242.238.90:<0.20473.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:04.305,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.307,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,987,992,995,998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.307,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21359.0> [ns_server:debug,2014-08-19T16:50:04.308,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2697 us [ns_server:debug,2014-08-19T16:50:04.308,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.308,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.309,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{980, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.311,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 988 state to replica [ns_server:info,2014-08-19T16:50:04.311,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [980,981,987,988,992,995,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([988], []) [ns_server:debug,2014-08-19T16:50:04.312,ns_1@10.242.238.90:<0.21360.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [980,981,987,988,992,995,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.235404>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,987,988,992,995,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.313,ns_1@10.242.238.90:<0.21360.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21357.0> [ns_server:info,2014-08-19T16:50:04.313,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` 
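ns_config_rep keeps reporting how long each full synchronization requested by 'ns_1@10.242.238.88' took ("Fully synchronized config in N us"). A small sketch for extracting those figures when eyeballing config-sync latency during the rebalance; sync_latencies_us is a hypothetical helper:

```python
import re

# Sketch: pull the "Fully synchronized config in N us" durations out of the
# dump for a quick latency summary (e.g. max() or statistics.median()).
SYNC = re.compile(r"Fully synchronized config in (\d+) us")

def sync_latencies_us(log_text):
    return [int(us) for us in SYNC.findall(log_text)]

print(sync_latencies_us("Fully synchronized config in 437 us"))  # [437]
```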
[ns_server:info,2014-08-19T16:50:04.319,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {987,1}, {988,1}, {992,1}, {995,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.320,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.320,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.320,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.320,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.321,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.321,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.321,ns_1@10.242.238.90:<0.21362.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.321,ns_1@10.242.238.90:<0.21362.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.321,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.321,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.322,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.322,ns_1@10.242.238.90:<0.21357.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.322,ns_1@10.242.238.90:<0.21360.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21357.0> [ns_server:debug,2014-08-19T16:50:04.322,ns_1@10.242.238.90:<0.21360.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.322,ns_1@10.242.238.90:<0.21364.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.322,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21357.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21359.0>,<<"cut off">>,<<"cut off">>,[],97,false,false,0, {1408,452604,320586}, completed, {<0.21360.0>,#Ref<0.0.0.235417>}, <<"replication_ns_1@10.242.238.90">>,<0.21357.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:04.323,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21360.0>,{#Ref<0.0.0.235406>,<0.21364.0>}} [error_logger:info,2014-08-19T16:50:04.323,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21364.0>}, {name, {new_child_id, [980,981,987,988,992,995,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,987,988,992,995,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:04.328,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.329,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,987,988,992,995,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.329,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21365.0> [ns_server:debug,2014-08-19T16:50:04.332,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.332,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4296 us [ns_server:debug,2014-08-19T16:50:04.332,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.333,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{988, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.335,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 983 state to replica [ns_server:info,2014-08-19T16:50:04.335,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [980,981,983,987,988,992,995,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023] ([983], []) 
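The supervisor reports above register each replicator under an id of the shape {"default", {new_child_id, VBuckets, 'ns_1@10.242.238.91'}}, so every filter change appears to produce a fresh, uniquely keyed child rather than mutating the old one. A sketch of that id shape in Python; child_id is illustrative, not an ns_server API:

```python
# Sketch: mirror the child id shape seen in the PROGRESS REPORTs above,
# where the bucket, vbucket set and source node are all baked into the key.
def child_id(bucket, vbuckets, source_node):
    return (bucket, ("new_child_id", tuple(sorted(vbuckets)), source_node))

print(child_id("default", [987, 995], "ns_1@10.242.238.91"))
# ('default', ('new_child_id', (987, 995), 'ns_1@10.242.238.91'))
```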
[ns_server:debug,2014-08-19T16:50:04.336,ns_1@10.242.238.90:<0.21367.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [980,981,983,987,988,992,995,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.235548>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,983,987,988,992,995,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.336,ns_1@10.242.238.90:<0.21367.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21364.0> [ns_server:info,2014-08-19T16:50:04.337,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.342,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {983,1}, {987,1}, {988,1}, {992,1}, {995,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.343,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.343,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.343,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.344,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.344,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.344,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.344,ns_1@10.242.238.90:<0.21369.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.344,ns_1@10.242.238.90:<0.21369.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.344,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:04.344,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.344,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.345,ns_1@10.242.238.90:<0.21364.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.345,ns_1@10.242.238.90:<0.21367.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21364.0> [ns_server:debug,2014-08-19T16:50:04.345,ns_1@10.242.238.90:<0.21367.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.345,ns_1@10.242.238.90:<0.21371.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.345,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21364.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21365.0>,<<"cut off">>,<<"cut off">>,[],100,false,false,0, {1408,452604,343746}, completed, {<0.21367.0>,#Ref<0.0.0.235561>}, <<"replication_ns_1@10.242.238.90">>,<0.21364.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:04.345,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21367.0>,{#Ref<0.0.0.235550>,<0.21371.0>}} [error_logger:info,2014-08-19T16:50:04.345,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21371.0>}, {name, {new_child_id, [980,981,983,987,988,992,995,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,983,987,988,992,995,998,999, 1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:50:04.346,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 736 state to active [ns_server:debug,2014-08-19T16:50:04.351,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:04.351,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 729 state to active [ns_server:debug,2014-08-19T16:50:04.351,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,983,987,988,992,995,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.352,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21372.0> 
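ns_memcached entries such as "Changed vbucket 736 state to active" and "Changed vbucket 982 state to replica" record this node's per-vbucket role as the rebalance progresses. A sketch that folds those messages into a state table; vbucket_states is a hypothetical helper:

```python
import re

# Sketch: build a vbucket -> latest-state table from the ns_memcached
# "Changed vbucket N state to S" entries, e.g. to see which vbuckets this
# node holds as active vs replica at the end of the excerpt.
STATE_CHANGE = re.compile(r"Changed vbucket (\d+) state to (\w+)")

def vbucket_states(log_text):
    states = {}
    for vb, state in STATE_CHANGE.findall(log_text):
        states[int(vb)] = state
    return states

print(vbucket_states("Changed vbucket 736 state to active"))  # {736: 'active'}
```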
[ns_server:debug,2014-08-19T16:50:04.354,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2736 us [ns_server:debug,2014-08-19T16:50:04.354,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.354,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.355,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{983, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.360,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 982 state to replica [ns_server:info,2014-08-19T16:50:04.361,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [980,981,982,983,987,988,992,995,998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023] ([982], []) [ns_server:debug,2014-08-19T16:50:04.362,ns_1@10.242.238.90:<0.21373.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [980,981,982,983,987,988,992,995,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.235708>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,982,983,987,988,992,995,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.362,ns_1@10.242.238.90:<0.21373.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21371.0> [ns_server:info,2014-08-19T16:50:04.362,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.368,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {982,1}, {983,1}, {987,1}, {988,1}, {992,1}, {995,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.369,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
[ns_server:info,2014-08-19T16:50:04.369,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.369,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.369,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.369,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.369,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.369,ns_1@10.242.238.90:<0.21376.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.370,ns_1@10.242.238.90:<0.21376.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.370,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.370,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.370,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.370,ns_1@10.242.238.90:<0.21371.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.370,ns_1@10.242.238.90:<0.21373.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21371.0> [ns_server:debug,2014-08-19T16:50:04.370,ns_1@10.242.238.90:<0.21373.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.370,ns_1@10.242.238.90:<0.21378.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.371,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21371.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21372.0>,<<"cut off">>,<<"cut off">>,[],103,false,false,0, {1408,452604,369351}, completed, {<0.21373.0>,#Ref<0.0.0.235721>}, <<"replication_ns_1@10.242.238.90">>,<0.21371.0>, {had_backfill,false,undefined,[]}, completed,false}. 
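The entries above repeat a fixed handoff pattern: a helper process registers under the new child id, links to the running ebucketmigrator and asks it to change its vbucket filter; the old migrator silences its upstream sender, confirms the downstream connection, passes its state to the replacement and exits, and the replacement then logs "Reusing old upstream". The Erlang below is only a minimal message-passing sketch of that handoff; the module, message and helper names are invented for illustration and are not the ns_server implementation.

-module(vb_filter_handoff_sketch).
-export([run/0]).

%% Illustrative only: the "old" migrator hands its state to the caller on a
%% filter-change request and then stops, mirroring the reply_and_die /
%% "Reusing old upstream" sequence in the log. All names are hypothetical.
run() ->
    Old = spawn(fun old_migrator/0),
    Ref = make_ref(),
    Old ! {perform_filter_change, self(), Ref,
           [980, 981, 982, 983 | lists:seq(987, 1023)]},
    receive
        {old_state, Ref, OldState} ->
            %% a real replacement would keep the upstream connection carried
            %% in the old state instead of opening a new one
            {reusing_old_upstream, proplists:get_value(name, OldState)}
    after 1000 ->
            timeout
    end.

old_migrator() ->
    receive
        {perform_filter_change, Caller, Ref, NewVBuckets} ->
            %% the real server first silences its upstream sender and confirms
            %% the downstream via an opaque message before giving up its state
            Caller ! {old_state, Ref,
                      [{name, <<"replication_ns_1@10.242.238.90">>},
                       {vbuckets, NewVBuckets},
                       {takeover, false}]}
    end.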
[ns_server:debug,2014-08-19T16:50:04.371,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21373.0>,{#Ref<0.0.0.235710>,<0.21378.0>}} [error_logger:info,2014-08-19T16:50:04.371,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21378.0>}, {name, {new_child_id, [980,981,982,983,987,988,992,995,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,982,983,987,988,992,995,998,999, 1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:50:04.372,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 730 state to active [ns_server:debug,2014-08-19T16:50:04.377,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.377,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,982,983,987,988,992,995,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.377,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21379.0> [ns_server:debug,2014-08-19T16:50:04.380,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.380,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3523 us [ns_server:debug,2014-08-19T16:50:04.381,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.382,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{982, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.383,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 991 state to replica [ns_server:info,2014-08-19T16:50:04.384,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [980,981,982,983,987,988,991,992,995,998,999,1000,1001,1002,1003,1004,1005, 
1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023] ([991], []) [ns_server:debug,2014-08-19T16:50:04.386,ns_1@10.242.238.90:<0.21380.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [980,981,982,983,987,988,991,992,995,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.235858>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,982,983,987,988,991,992,995,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.386,ns_1@10.242.238.90:<0.21380.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21378.0> [ns_server:info,2014-08-19T16:50:04.386,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.393,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {982,1}, {983,1}, {987,1}, {988,1}, {991,1}, {992,1}, {995,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.393,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.394,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.394,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.394,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.394,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.394,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.394,ns_1@10.242.238.90:<0.21382.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [views:debug,2014-08-19T16:50:04.394,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/736. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:04.394,ns_1@10.242.238.90:<0.21382.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:04.394,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",736,active,1} [rebalance:info,2014-08-19T16:50:04.394,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.395,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.395,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.395,ns_1@10.242.238.90:<0.21378.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.395,ns_1@10.242.238.90:<0.21380.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21378.0> [ns_server:debug,2014-08-19T16:50:04.395,ns_1@10.242.238.90:<0.21380.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.395,ns_1@10.242.238.90:<0.21384.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.395,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21378.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21379.0>,<<"cut off">>,<<"cut off">>,[],106,false,false,0, {1408,452604,394085}, completed, {<0.21380.0>,#Ref<0.0.0.235872>}, <<"replication_ns_1@10.242.238.90">>,<0.21378.0>, {had_backfill,false,undefined,[]}, completed,false}. 
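Interleaved with the filter changes, ns_memcached state changes surface as mc_couch events of the shape {set_vbucket, Bucket, VBucket, State, N} seen above (e.g. {set_vbucket,"default",736,active,1}). The fragment below is a minimal handler matching that tuple, purely to name the payload; it is not the real capi_set_view_manager callback, and reading the trailing integer as a checkpoint/revision counter is an assumption.

-module(set_vbucket_event_sketch).
-export([handle_mc_couch_event/1]).

%% Matches the tuple shape logged above, e.g. {set_vbucket,"default",736,active,1}.
%% Hypothetical handler for illustration; the trailing integer is assumed to be
%% a checkpoint/revision counter, and the real handler updates set-view groups.
handle_mc_couch_event({set_vbucket, Bucket, VBucket, State, Checkpoint}) ->
    io:format("~s/~b -> ~p (~b)~n", [Bucket, VBucket, State, Checkpoint]),
    ok.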
[ns_server:debug,2014-08-19T16:50:04.396,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21380.0>,{#Ref<0.0.0.235860>,<0.21384.0>}} [error_logger:info,2014-08-19T16:50:04.396,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21384.0>}, {name, {new_child_id, [980,981,982,983,987,988,991,992,995,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,982,983,987,988,991,992,995,998, 999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:04.400,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.402,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,982,983,987,988,991,992,995,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.402,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21385.0> [ns_server:info,2014-08-19T16:50:04.403,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 742 state to active [ns_server:debug,2014-08-19T16:50:04.406,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6036 us [ns_server:debug,2014-08-19T16:50:04.407,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.407,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.408,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{991, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.409,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 984 state to replica [ns_server:info,2014-08-19T16:50:04.409,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have 
[980,981,982,983,984,987,988,991,992,995,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023] ([984], []) [ns_server:debug,2014-08-19T16:50:04.411,ns_1@10.242.238.90:<0.21387.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [980,981,982,983,984,987,988,991,992,995,998, 999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.236033>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,982,983,984,987,988,991,992,995,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.411,ns_1@10.242.238.90:<0.21387.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21384.0> [ns_server:info,2014-08-19T16:50:04.411,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.418,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {987,1}, {988,1}, {991,1}, {992,1}, {995,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.419,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.419,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.419,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.419,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.419,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.419,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.419,ns_1@10.242.238.90:<0.21389.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.419,ns_1@10.242.238.90:<0.21389.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.420,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
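tap_replication_manager prints each change as the full target vbucket list followed by the delta in ([Added], [Removed]) form, here ([984], []), and the migrator then applies it to the tap stream as {VBucket, CheckpointId} pairs. The sketch below reproduces both representations; the fixed checkpoint id of 1 simply mirrors these entries and the helper names are invented.

-module(vb_filter_delta_sketch).
-export([delta/2, checkpoint_filter/1]).

%% ([Added], [Removed]) form used by the tap_replication_manager entries above.
delta(OldVBs, NewVBs) ->
    {NewVBs -- OldVBs, OldVBs -- NewVBs}.

%% The tap-stream filter is logged as {VBucket, CheckpointId} pairs; checkpoint
%% id 1 matches these entries but is an assumption, not a rule.
checkpoint_filter(VBuckets) ->
    [{VB, 1} || VB <- lists:usort(VBuckets)].

For example, delta([980,981,983], [980,981,983,984]) returns {[984], []}, matching the delta printed above.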
[ns_server:debug,2014-08-19T16:50:04.420,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.420,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.420,ns_1@10.242.238.90:<0.21384.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.420,ns_1@10.242.238.90:<0.21387.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21384.0> [ns_server:debug,2014-08-19T16:50:04.420,ns_1@10.242.238.90:<0.21387.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.420,ns_1@10.242.238.90:<0.21391.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.420,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21384.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21385.0>,<<"cut off">>,<<"cut off">>,[],109,false,false,0, {1408,452604,419301}, completed, {<0.21387.0>,#Ref<0.0.0.236046>}, <<"replication_ns_1@10.242.238.90">>,<0.21384.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:04.421,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21387.0>,{#Ref<0.0.0.236035>,<0.21391.0>}} [error_logger:info,2014-08-19T16:50:04.421,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21391.0>}, {name, {new_child_id, [980,981,982,983,984,987,988,991,992,995,998, 999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,982,983,984,987,988,991,992,995, 998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:04.425,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.427,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,982,983,984,987,988,991,992,995,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.427,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21392.0> [ns_server:debug,2014-08-19T16:50:04.428,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:50:04.428,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3467 us [ns_server:debug,2014-08-19T16:50:04.429,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.429,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{984, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.431,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 989 state to replica [ns_server:info,2014-08-19T16:50:04.431,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [980,981,982,983,984,987,988,989,991,992,995,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023] ([989], []) [ns_server:debug,2014-08-19T16:50:04.432,ns_1@10.242.238.90:<0.21393.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [980,981,982,983,984,987,988,989,991,992,995, 998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.236167>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,982,983,984,987,988,989,991,992,995,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.432,ns_1@10.242.238.90:<0.21393.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21391.0> [ns_server:info,2014-08-19T16:50:04.432,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.438,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {987,1}, {988,1}, {989,1}, {991,1}, {992,1}, {995,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.439,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
[ns_server:info,2014-08-19T16:50:04.439,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.439,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.440,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.440,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.440,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.440,ns_1@10.242.238.90:<0.21395.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.440,ns_1@10.242.238.90:<0.21395.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.440,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.440,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.440,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.441,ns_1@10.242.238.90:<0.21391.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.441,ns_1@10.242.238.90:<0.21393.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21391.0> [ns_server:debug,2014-08-19T16:50:04.441,ns_1@10.242.238.90:<0.21393.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.441,ns_1@10.242.238.90:<0.21397.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.441,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21391.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21392.0>,<<"cut off">>,<<"cut off">>,[],112,false,false,0, {1408,452604,439770}, completed, {<0.21393.0>,#Ref<0.0.0.236180>}, <<"replication_ns_1@10.242.238.90">>,<0.21391.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:04.441,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21393.0>,{#Ref<0.0.0.236169>,<0.21397.0>}} [error_logger:info,2014-08-19T16:50:04.441,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21397.0>}, {name, {new_child_id, [980,981,982,983,984,987,988,989,991,992,995, 998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,982,983,984,987,988,989,991,992, 995,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:04.447,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,982,983,984,987,988,989,991,992,995,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.448,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21399.0> [ns_server:debug,2014-08-19T16:50:04.448,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.451,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2756 us [ns_server:debug,2014-08-19T16:50:04.451,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.452,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.452,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{989, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.455,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 993 state to replica [ns_server:info,2014-08-19T16:50:04.456,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [980,981,982,983,984,987,988,989,991,992,993,995,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023] 
([993], []) [ns_server:debug,2014-08-19T16:50:04.457,ns_1@10.242.238.90:<0.21400.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [980,981,982,983,984,987,988,989,991,992,993, 995,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.236305>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,982,983,984,987,988,989,991,992,993,995,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.457,ns_1@10.242.238.90:<0.21400.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21397.0> [ns_server:info,2014-08-19T16:50:04.458,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.465,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {987,1}, {988,1}, {989,1}, {991,1}, {992,1}, {993,1}, {995,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.466,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.466,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.466,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.466,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.466,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.467,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.467,ns_1@10.242.238.90:<0.21402.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.467,ns_1@10.242.238.90:<0.21402.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.467,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:04.467,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.467,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.467,ns_1@10.242.238.90:<0.21397.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.467,ns_1@10.242.238.90:<0.21400.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21397.0> [ns_server:debug,2014-08-19T16:50:04.468,ns_1@10.242.238.90:<0.21400.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.468,ns_1@10.242.238.90:<0.21404.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.468,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21397.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21399.0>,<<"cut off">>,<<"cut off">>,[],115,false,false,0, {1408,452604,466589}, completed, {<0.21400.0>,#Ref<0.0.0.236318>}, <<"replication_ns_1@10.242.238.90">>,<0.21397.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:04.468,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21400.0>,{#Ref<0.0.0.236307>,<0.21404.0>}} [error_logger:info,2014-08-19T16:50:04.468,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21404.0>}, {name, {new_child_id, [980,981,982,983,984,987,988,989,991,992,993, 995,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,982,983,984,987,988,989,991,992, 993,995,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:04.472,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.476,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,982,983,984,987,988,989,991,992,993,995,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.476,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21406.0> [ns_server:debug,2014-08-19T16:50:04.477,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
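The "Sending opaque message to confirm downstream reception" / "Got close ack!" pairs are a drain check: a marker is sent after everything already queued, and seeing its ack proves the downstream has consumed the queue up to that point. The sketch below reproduces only the message-passing shape with plain Erlang processes; in the real server the exchange happens over the memcached connection, and the names here are illustrative.

-module(opaque_confirm_sketch).
-export([confirm_downstream/0]).

%% Illustration of the opaque-marker drain check logged above; the downstream
%% here is a local process, not a memcached connection.
confirm_downstream() ->
    Self = self(),
    Opaque = make_ref(),
    Downstream = spawn(fun() ->
                               receive
                                   {opaque, Marker} -> Self ! {opaque_ack, Marker}
                               end
                       end),
    Downstream ! {opaque, Opaque},
    receive
        {opaque_ack, Opaque} -> got_close_ack
    after 5000 ->
            {error, no_ack}
    end.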
[ns_server:debug,2014-08-19T16:50:04.477,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4423 us [ns_server:debug,2014-08-19T16:50:04.477,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.478,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{993, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:04.479,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/729. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:04.479,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",729,active,1} [ns_server:info,2014-08-19T16:50:04.480,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 997 state to replica [ns_server:info,2014-08-19T16:50:04.480,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [980,981,982,983,984,987,988,989,991,992,993,995,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023] ([997], []) [ns_server:debug,2014-08-19T16:50:04.481,ns_1@10.242.238.90:<0.21407.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [980,981,982,983,984,987,988,989,991,992,993, 995,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.236470>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,982,983,984,987,988,989,991,992,993,995,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.482,ns_1@10.242.238.90:<0.21407.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21404.0> [ns_server:info,2014-08-19T16:50:04.482,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:50:04.483,ns_1@10.242.238.90:<0.20574.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.483,ns_1@10.242.238.90:<0.20574.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.483,ns_1@10.242.238.90:<0.21409.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm 
downstream reception [ns_server:debug,2014-08-19T16:50:04.483,ns_1@10.242.238.90:<0.21409.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.483,ns_1@10.242.238.90:<0.20574.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:04.488,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {987,1}, {988,1}, {989,1}, {991,1}, {992,1}, {993,1}, {995,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.489,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.489,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.489,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.490,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.490,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.490,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.490,ns_1@10.242.238.90:<0.21410.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.490,ns_1@10.242.238.90:<0.21410.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.490,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.490,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.490,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.490,ns_1@10.242.238.90:<0.21404.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.490,ns_1@10.242.238.90:<0.21407.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21404.0> [ns_server:debug,2014-08-19T16:50:04.491,ns_1@10.242.238.90:<0.21407.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.491,ns_1@10.242.238.90:<0.21412.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.491,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21404.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21406.0>,<<"cut off">>,<<"cut off">>,[],118,false,false,0, {1408,452604,489747}, completed, {<0.21407.0>,#Ref<0.0.0.236483>}, <<"replication_ns_1@10.242.238.90">>,<0.21404.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:04.491,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21407.0>,{#Ref<0.0.0.236472>,<0.21412.0>}} [error_logger:info,2014-08-19T16:50:04.491,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21412.0>}, {name, {new_child_id, [980,981,982,983,984,987,988,989,991,992,993, 995,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,982,983,984,987,988,989,991,992, 993,995,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:50:04.492,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 731 state to active [ns_server:debug,2014-08-19T16:50:04.496,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.497,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,982,983,984,987,988,989,991,992,993,995,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.498,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21413.0> [ns_server:debug,2014-08-19T16:50:04.499,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2834 us [ns_server:debug,2014-08-19T16:50:04.499,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.500,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.500,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{997, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, 
{servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.507,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 996 state to replica [ns_server:info,2014-08-19T16:50:04.507,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [980,981,982,983,984,987,988,989,991,992,993,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023] ([996], []) [ns_server:debug,2014-08-19T16:50:04.508,ns_1@10.242.238.90:<0.21414.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [980,981,982,983,984,987,988,989,991,992,993, 995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.236634>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,982,983,984,987,988,989,991,992,993,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.509,ns_1@10.242.238.90:<0.21414.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21412.0> [ns_server:info,2014-08-19T16:50:04.509,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.515,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {987,1}, {988,1}, {989,1}, {991,1}, {992,1}, {993,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.516,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.516,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.516,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
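Each "config change: buckets" entry above carries a one-vbucket slice of the map; the changed vbucket appears with two chains, which in these entries read as the replica chain before and after the move (e.g. 997: ['ns_1@10.242.238.88',undefined] followed by ['ns_1@10.242.238.91','ns_1@10.242.238.90']). The helper below only names the pieces under that reading; treat the before/after interpretation as an assumption, and the module as illustrative rather than part of ns_server.

-module(bucket_map_sketch).
-export([describe_move/1]).

%% Names the parts of a changed-map entry such as
%% {997, ['ns_1@10.242.238.88', undefined],
%%       ['ns_1@10.242.238.91', 'ns_1@10.242.238.90']}.
%% Reading the two lists as the chain before and after the move is an assumption.
describe_move({VBucket, ChainBefore, ChainAfter}) ->
    [{vbucket, VBucket},
     {master_before, hd(ChainBefore)},
     {master_after, hd(ChainAfter)},
     {replicas_after, tl(ChainAfter)}].

Applied to the 997 entry above, this yields master_before 'ns_1@10.242.238.88', master_after 'ns_1@10.242.238.91' and replicas_after ['ns_1@10.242.238.90'].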
[ns_server:debug,2014-08-19T16:50:04.516,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.516,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.517,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.517,ns_1@10.242.238.90:<0.21417.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.517,ns_1@10.242.238.90:<0.21417.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.517,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.517,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.517,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.517,ns_1@10.242.238.90:<0.21412.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.517,ns_1@10.242.238.90:<0.21414.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21412.0> [ns_server:debug,2014-08-19T16:50:04.517,ns_1@10.242.238.90:<0.21414.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.518,ns_1@10.242.238.90:<0.21419.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.518,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21412.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21413.0>,<<"cut off">>,<<"cut off">>,[],121,false,false,0, {1408,452604,516573}, completed, {<0.21414.0>,#Ref<0.0.0.236648>}, <<"replication_ns_1@10.242.238.90">>,<0.21412.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:04.518,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21414.0>,{#Ref<0.0.0.236636>,<0.21419.0>}} [ns_server:info,2014-08-19T16:50:04.518,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 737 state to active [error_logger:info,2014-08-19T16:50:04.518,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21419.0>}, {name, {new_child_id, [980,981,982,983,984,987,988,989,991,992,993, 995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,982,983,984,987,988,989,991,992, 993,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:04.524,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,982,983,984,987,988,989,991,992,993,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.524,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21420.0> [ns_server:debug,2014-08-19T16:50:04.526,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:04.527,ns_1@10.242.238.90:<0.20599.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.527,ns_1@10.242.238.90:<0.20599.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.527,ns_1@10.242.238.90:<0.21421.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.527,ns_1@10.242.238.90:<0.21421.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.528,ns_1@10.242.238.90:<0.20599.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:04.530,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.530,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4147 us [ns_server:debug,2014-08-19T16:50:04.531,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.532,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{996, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.533,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 994 state to replica [ns_server:info,2014-08-19T16:50:04.533,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [980,981,982,983,984,987,988,989,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023] ([994], []) [ns_server:debug,2014-08-19T16:50:04.534,ns_1@10.242.238.90:<0.21422.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [980,981,982,983,984,987,988,989,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.236802>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,982,983,984,987,988,989,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.534,ns_1@10.242.238.90:<0.21422.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21419.0> [ns_server:info,2014-08-19T16:50:04.534,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.542,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {987,1}, {988,1}, {989,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] 
[ns_server:info,2014-08-19T16:50:04.543,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.543,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.543,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.543,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.543,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.543,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.544,ns_1@10.242.238.90:<0.21425.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.544,ns_1@10.242.238.90:<0.21425.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.544,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.544,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.544,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.544,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.544,ns_1@10.242.238.90:<0.21422.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21419.0> [ns_server:debug,2014-08-19T16:50:04.545,ns_1@10.242.238.90:<0.21422.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.545,ns_1@10.242.238.90:<0.21427.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.545,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21419.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21420.0>,<<"cut off">>,<<"cut off">>,[],124,false,false,0, {1408,452604,543396}, completed, {<0.21422.0>,#Ref<0.0.0.236815>}, <<"replication_ns_1@10.242.238.90">>,<0.21419.0>, {had_backfill,false,undefined,[]}, completed,false}. 
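The entries above trace one complete vbucket filter change handshake on the tap stream `replication_ns_1@10.242.238.90`: a short-lived helper registers the new child id and links itself to the old ebucketmigrator, the old process changes the filter, silences its upstream sender, confirms downstream delivery with an opaque message, and then hands its state to the replacement instance, which reuses the old upstream. As a rough reading aid only (this is not Couchbase tooling; the header layout and function names are simply those printed in this capture), a few lines of Python can reduce a slice of this log to that per-process step sequence:

import re
from collections import defaultdict

# Header layout assumed from this capture, e.g.
# [ns_server:debug,2014-08-19T16:50:04.543,ns_1@10.242.238.90:<0.21419.0>:ebucketmigrator_srv:reply_and_die:213]
ENTRY = re.compile(
    r"\[(?P<cat>[a-z_]+):(?P<level>[a-z]+),"
    r"(?P<ts>[0-9T:.\-]+),"
    r"(?P<node>[^:]+):(?P<proc>[^:]*<[\d.]+>|[^:]+):"
    r"(?P<module>\w+):(?P<function>\w+):(?P<line>\d+)\]"
)

# Handshake steps of interest, keyed by the logging function seen above.
HANDSHAKE_FUNS = {
    "do_perform_vbucket_filter_change",  # register new child id / link to old migrator
    "handle_call",                       # start + change filter on the tap stream
    "process_upstream",                  # completion message, silence upstream sender
    "confirm_downstream",                # confirm downstream reception
    "confirm_sent_messages",             # opaque message round trip
    "do_confirm_sent_messages",          # close ack
    "reply_and_die",                     # hand old state to the caller
    "mk_old_state_retriever",            # new instance picks the old state up
}

def handshake_timeline(log_text):
    """Group the handshake-related entries by logging pid, in log order."""
    timeline = defaultdict(list)
    for m in ENTRY.finditer(log_text):
        if (m.group("module") in ("ebucketmigrator_srv", "ns_vbm_new_sup")
                and m.group("function") in HANDSHAKE_FUNS):
            timeline[m.group("proc")].append((m.group("ts"), m.group("function")))
    return timeline

Fed the block above, this groups, for instance, all of <0.21419.0>'s steps into one ordered sequence from the filter change call through reply_and_die.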
[ns_server:debug,2014-08-19T16:50:04.545,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21422.0>,{#Ref<0.0.0.236804>,<0.21427.0>}} [ns_server:info,2014-08-19T16:50:04.545,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 738 state to active [error_logger:info,2014-08-19T16:50:04.545,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21427.0>}, {name, {new_child_id, [980,981,982,983,984,987,988,989,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,982,983,984,987,988,989,991,992, 993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [views:debug,2014-08-19T16:50:04.546,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/742. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:04.546,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",742,active,1} [ns_server:debug,2014-08-19T16:50:04.551,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.551,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,982,983,984,987,988,989,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.552,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21428.0> [ns_server:info,2014-08-19T16:50:04.557,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 733 state to active [ns_server:debug,2014-08-19T16:50:04.558,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6853 us [ns_server:debug,2014-08-19T16:50:04.558,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.559,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.560,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{994, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, 
{type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.561,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 990 state to replica [ns_server:info,2014-08-19T16:50:04.561,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [980,981,982,983,984,987,988,989,990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023] ([990], []) [ns_server:debug,2014-08-19T16:50:04.565,ns_1@10.242.238.90:<0.21429.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [980,981,982,983,984,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.236985>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,982,983,984,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.565,ns_1@10.242.238.90:<0.21429.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21427.0> [ns_server:info,2014-08-19T16:50:04.565,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.571,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.572,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.572,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.573,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:50:04.573,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.573,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.573,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.573,ns_1@10.242.238.90:<0.21431.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.573,ns_1@10.242.238.90:<0.21431.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.573,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.574,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.574,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.574,ns_1@10.242.238.90:<0.21427.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.574,ns_1@10.242.238.90:<0.21429.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21427.0> [ns_server:debug,2014-08-19T16:50:04.574,ns_1@10.242.238.90:<0.21429.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.574,ns_1@10.242.238.90:<0.21433.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.574,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21427.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21428.0>,<<"cut off">>,<<"cut off">>,[],127,false,false,0, {1408,452604,572830}, completed, {<0.21429.0>,#Ref<0.0.0.236998>}, <<"replication_ns_1@10.242.238.90">>,<0.21427.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:04.575,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21429.0>,{#Ref<0.0.0.236987>,<0.21433.0>}} [error_logger:info,2014-08-19T16:50:04.575,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21433.0>}, {name, {new_child_id, [980,981,982,983,984,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,982,983,984,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:50:04.577,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 728 state to active [ns_server:debug,2014-08-19T16:50:04.580,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.581,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,982,983,984,987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.581,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21435.0> [ns_server:debug,2014-08-19T16:50:04.583,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.583,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2580 us [ns_server:debug,2014-08-19T16:50:04.584,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.585,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{990, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.607,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:04.609,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 727 state to active 
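Each "config change: buckets" entry in this stretch carries a single-vbucket map delta of the form {VBucket, OldChain, NewChain}; the one just above, for example, records vbucket 990 moving from ['ns_1@10.242.238.88',undefined] to ['ns_1@10.242.238.91','ns_1@10.242.238.90']. A minimal sketch, assuming only the printed Erlang-term layout shown here (one-entry map lists, quoted node atoms) and deliberately ignoring the rest of the bucket config, for collecting those deltas:

import re

# Matches the one-vbucket map term printed by ns_config_log above, e.g.
#   {map,[{990,
#          ['ns_1@10.242.238.88',undefined],
#          ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}
MAP_DELTA = re.compile(
    r"\{map,\[\{(?P<vb>\d+),\s*"
    r"\[(?P<old>[^\]]*)\],\s*"
    r"\[(?P<new>[^\]]*)\]\}\]\}"
)

def chain(raw):
    """Turn a printed Erlang list body into a list of node names."""
    return [item.strip().strip("'") for item in raw.split(",")]

def map_deltas(log_text):
    """Yield (vbucket, old_chain, new_chain) for every buckets config change."""
    for m in MAP_DELTA.finditer(log_text):
        yield int(m.group("vb")), chain(m.group("old")), chain(m.group("new"))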
[ns_server:debug,2014-08-19T16:50:04.609,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2704 us [ns_server:debug,2014-08-19T16:50:04.610,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.611,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.611,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{486, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:04.613,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/730. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:04.613,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",730,active,1} [ns_server:info,2014-08-19T16:50:04.623,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 735 state to active [ns_server:debug,2014-08-19T16:50:04.630,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.632,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.632,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2014 us [ns_server:debug,2014-08-19T16:50:04.632,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.633,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{736, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.651,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.653,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1140 us [ns_server:debug,2014-08-19T16:50:04.653,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
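Alongside the replication changes, this slice records vbuckets being activated on the node through two kinds of entries: ns_memcached's "Changed vbucket N state to active/replica" calls and the matching mc_couch_events "Got set_vbucket event for default/N" notifications. A small, assumption-laden sketch (the phrasing is taken from this capture and nothing else) for tallying which vbuckets reached which state and which memcached changes have no couch event in the slice yet:

import re

# Wording as printed in this capture; treat both patterns as assumptions
# about the log text, not a stable interface.
MEMCACHED_CHANGE = re.compile(r"Changed vbucket (\d+) state to (\w+)")
MC_COUCH_EVENT = re.compile(
    r"Got set_vbucket event for (\S+)/(\d+)\.\s*Updated state: (\w+)"
)

def vbucket_states(log_text):
    """Last state ns_memcached reported for each vbucket in the slice."""
    states = {}
    for vb, state in MEMCACHED_CHANGE.findall(log_text):
        states[int(vb)] = state
    return states

def missing_couch_events(log_text):
    """Vbuckets changed in ns_memcached with no set_vbucket event (yet)."""
    acked = {int(vb) for _bucket, vb, _state in MC_COUCH_EVENT.findall(log_text)}
    return sorted(vb for vb in vbucket_states(log_text) if vb not in acked)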
[ns_server:debug,2014-08-19T16:50:04.653,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.654,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{729, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.656,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 739 state to active [ns_server:debug,2014-08-19T16:50:04.687,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:04.688,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 734 state to active [views:debug,2014-08-19T16:50:04.688,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/738. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:04.688,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",738,active,1} [ns_server:debug,2014-08-19T16:50:04.689,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2384 us [ns_server:debug,2014-08-19T16:50:04.689,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.690,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.690,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{730, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.704,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:04.706,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 732 state to active [ns_server:debug,2014-08-19T16:50:04.708,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3426 us [ns_server:debug,2014-08-19T16:50:04.708,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.708,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing 
replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.709,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{481, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.724,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.728,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3207 us [ns_server:debug,2014-08-19T16:50:04.728,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.728,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.729,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{742, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.739,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 726 state to active [ns_server:debug,2014-08-19T16:50:04.744,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.747,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.748,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.748,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4024 us [ns_server:debug,2014-08-19T16:50:04.749,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{478, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:04.755,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got 
set_vbucket event for default/728. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:04.755,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",728,active,1} [ns_server:debug,2014-08-19T16:50:04.765,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.769,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.769,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3344 us [ns_server:debug,2014-08-19T16:50:04.769,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.770,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{480, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.785,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.788,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2642 us [ns_server:debug,2014-08-19T16:50:04.788,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.788,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.789,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{476, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.795,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 986 state to replica [ns_server:info,2014-08-19T16:50:04.795,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [980,981,982,983,984,986,987,988,989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023] ([986], []) [ns_server:debug,2014-08-19T16:50:04.796,ns_1@10.242.238.90:<0.21443.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself 
under id:{"default", {new_child_id, [980,981,982,983,984,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.237513>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,982,983,984,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.797,ns_1@10.242.238.90:<0.21443.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21433.0> [ns_server:info,2014-08-19T16:50:04.797,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.810,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.812,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.812,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.812,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.812,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.812,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.812,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.812,ns_1@10.242.238.90:<0.21445.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.813,ns_1@10.242.238.90:<0.21445.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.813,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:04.813,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.813,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.813,ns_1@10.242.238.90:<0.21433.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.813,ns_1@10.242.238.90:<0.21443.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21433.0> [ns_server:debug,2014-08-19T16:50:04.814,ns_1@10.242.238.90:<0.21443.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.814,ns_1@10.242.238.90:<0.21447.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.814,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21433.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21435.0>,<<"cut off">>,<<"cut off">>,[],130,false,false,0, {1408,452604,812330}, completed, {<0.21443.0>,#Ref<0.0.0.237527>}, <<"replication_ns_1@10.242.238.90">>,<0.21433.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:04.814,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21443.0>,{#Ref<0.0.0.237516>,<0.21447.0>}} [error_logger:info,2014-08-19T16:50:04.814,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21447.0>}, {name, {new_child_id, [980,981,982,983,984,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,982,983,984,986,987,988,989,990, 991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [views:debug,2014-08-19T16:50:04.818,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/739. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:04.818,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",739,active,1} [ns_server:debug,2014-08-19T16:50:04.818,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.820,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,982,983,984,986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.821,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21448.0> [ns_server:debug,2014-08-19T16:50:04.822,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.822,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3337 us [ns_server:debug,2014-08-19T16:50:04.822,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.823,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{986, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.840,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.846,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.847,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{731, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:04.860,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/737. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:04.863,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 25 us [ns_server:debug,2014-08-19T16:50:04.863,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",737,active,1} [ns_server:debug,2014-08-19T16:50:04.863,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.884,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.885,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 310 us [ns_server:debug,2014-08-19T16:50:04.885,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.885,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.886,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{737, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.888,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 985 state to replica [ns_server:info,2014-08-19T16:50:04.888,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([985], []) [ns_server:debug,2014-08-19T16:50:04.889,ns_1@10.242.238.90:<0.21451.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.237752>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[980,981,982,983,984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:04.889,ns_1@10.242.238.90:<0.21451.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21447.0> 
[ns_server:info,2014-08-19T16:50:04.889,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:04.896,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:04.897,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:04.897,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:04.897,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:04.897,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:04.897,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:04.897,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:04.897,ns_1@10.242.238.90:<0.21454.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:04.898,ns_1@10.242.238.90:<0.21454.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:04.898,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:04.898,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:04.898,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:04.898,ns_1@10.242.238.90:<0.21447.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:04.898,ns_1@10.242.238.90:<0.21451.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21447.0> [ns_server:debug,2014-08-19T16:50:04.898,ns_1@10.242.238.90:<0.21451.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:04.899,ns_1@10.242.238.90:<0.21456.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:04.899,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21447.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21448.0>,<<"cut off">>,<<"cut off">>,[],133,false,false,0, {1408,452604,897329}, completed, {<0.21451.0>,#Ref<0.0.0.237765>}, <<"replication_ns_1@10.242.238.90">>,<0.21447.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:04.899,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.21451.0>,{#Ref<0.0.0.237754>,<0.21456.0>}} [error_logger:info,2014-08-19T16:50:04.899,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.21456.0>}, {name, {new_child_id, [980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [views:debug,2014-08-19T16:50:04.902,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/735. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:04.902,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",735,active,1} [ns_server:debug,2014-08-19T16:50:04.903,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.905,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:04.906,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21457.0> [ns_server:debug,2014-08-19T16:50:04.906,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2361 us [ns_server:debug,2014-08-19T16:50:04.906,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.907,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.907,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{985, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.923,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.926,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.926,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3203 us [ns_server:debug,2014-08-19T16:50:04.927,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.927,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{738, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:04.935,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/733. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:04.935,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",733,active,1} [ns_server:debug,2014-08-19T16:50:04.944,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.947,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2914 us [ns_server:debug,2014-08-19T16:50:04.947,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.948,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.948,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{733, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.966,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.970,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.970,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3226 us [ns_server:debug,2014-08-19T16:50:04.970,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:04.971,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/731. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:04.971,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",731,active,1} [ns_server:debug,2014-08-19T16:50:04.971,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{728, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.984,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:04.991,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.991,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7472 us [ns_server:debug,2014-08-19T16:50:04.992,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.993,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{484, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:05.003,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/727. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:05.003,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",727,active,1} [ns_server:debug,2014-08-19T16:50:05.009,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:05.012,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.012,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3325 us [ns_server:debug,2014-08-19T16:50:05.013,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.013,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{727, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:05.036,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/734. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:05.036,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",734,active,1} [ns_server:debug,2014-08-19T16:50:05.038,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:05.040,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1206 us [ns_server:debug,2014-08-19T16:50:05.040,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.040,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.041,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{735, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.056,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:05.059,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:50:05.060,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3174 us [ns_server:debug,2014-08-19T16:50:05.060,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.061,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{472, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:05.071,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/732. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:05.071,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",732,active,1} [ns_server:debug,2014-08-19T16:50:05.079,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:05.081,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2558 us [ns_server:debug,2014-08-19T16:50:05.081,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.082,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.082,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{739, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.100,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:05.103,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.103,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3229 us [ns_server:debug,2014-08-19T16:50:05.103,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:05.104,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/726. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:05.104,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",726,active,1} [ns_server:debug,2014-08-19T16:50:05.104,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{482, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.131,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:05.138,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7125 us [ns_server:debug,2014-08-19T16:50:05.138,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.139,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.140,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{734, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.158,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:05.160,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1640 us [ns_server:debug,2014-08-19T16:50:05.160,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.160,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.161,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{732, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
[ns_server:debug,2014-08-19T16:50:05.174,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:05.177,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2406 us [ns_server:debug,2014-08-19T16:50:05.177,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.177,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.178,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{483, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.199,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:05.202,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3371 us [ns_server:debug,2014-08-19T16:50:05.203,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.203,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.204,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{726, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.218,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:05.220,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1852 us [ns_server:debug,2014-08-19T16:50:05.220,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.220,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.221,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{485, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, 
{uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.239,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:05.242,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.242,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3210 us [ns_server:debug,2014-08-19T16:50:05.243,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.243,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{474, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:05.261,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 979 state to replica [ns_server:info,2014-08-19T16:50:05.265,ns_1@10.242.238.90:<0.21466.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 979 to state replica [ns_server:debug,2014-08-19T16:50:05.302,ns_1@10.242.238.90:<0.21466.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_979_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:05.303,ns_1@10.242.238.90:<0.21466.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[979]}, {checkpoints,[{979,0}]}, {name,<<"replication_building_979_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[979]}, {takeover,false}, {suffix,"building_979_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",979,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:05.304,ns_1@10.242.238.90:<0.21466.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21481.0> [rebalance:debug,2014-08-19T16:50:05.304,ns_1@10.242.238.90:<0.21466.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:05.305,ns_1@10.242.238.90:<0.21466.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1362.1>,#Ref<16550.0.1.86904>}]} [rebalance:info,2014-08-19T16:50:05.305,ns_1@10.242.238.90:<0.21466.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 979 [rebalance:debug,2014-08-19T16:50:05.305,ns_1@10.242.238.90:<0.21466.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1362.1>,#Ref<16550.0.1.86904>}] 
[ns_server:debug,2014-08-19T16:50:05.306,ns_1@10.242.238.90:<0.21466.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:05.317,ns_1@10.242.238.90:<0.21482.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 979 [ns_server:info,2014-08-19T16:50:05.323,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 725 state to replica [ns_server:info,2014-08-19T16:50:05.328,ns_1@10.242.238.90:<0.21485.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 725 to state replica [ns_server:debug,2014-08-19T16:50:05.346,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 979. Nacking mccouch update. [views:debug,2014-08-19T16:50:05.346,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/979. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:05.347,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",979,replica,0} [ns_server:debug,2014-08-19T16:50:05.347,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,1016,987,756,740,1019,1003,990,759,743,727,1022,1006,993,762, 746,730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755,739,1018, 1002,989,758,742,726,1021,1005,992,761,745,729,1008,995,979,764,748,732,1011, 998,982,767,751,735,1014,985,754,738,1017,1001,988,757,741,1020,1004,991,760, 744,728,1023,1007,994,763,747,731,1010,981,766,734,1013,753,1000] [ns_server:debug,2014-08-19T16:50:05.373,ns_1@10.242.238.90:<0.21485.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_725_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:05.375,ns_1@10.242.238.90:<0.21485.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[725]}, {checkpoints,[{725,0}]}, {name,<<"replication_building_725_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[725]}, {takeover,false}, {suffix,"building_725_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",725,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:05.375,ns_1@10.242.238.90:<0.21485.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21492.0> [rebalance:debug,2014-08-19T16:50:05.376,ns_1@10.242.238.90:<0.21485.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:05.376,ns_1@10.242.238.90:<0.21485.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1382.1>,#Ref<16550.0.1.87022>}]} [rebalance:info,2014-08-19T16:50:05.376,ns_1@10.242.238.90:<0.21485.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 725 [rebalance:debug,2014-08-19T16:50:05.377,ns_1@10.242.238.90:<0.21485.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1382.1>,#Ref<16550.0.1.87022>}] [ns_server:debug,2014-08-19T16:50:05.377,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21493.0> (ok) [ns_server:debug,2014-08-19T16:50:05.377,ns_1@10.242.238.90:<0.21485.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[rebalance:debug,2014-08-19T16:50:05.380,ns_1@10.242.238.90:<0.21494.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 725 [views:debug,2014-08-19T16:50:05.397,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/979. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:05.397,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",979,replica,0} [ns_server:info,2014-08-19T16:50:05.454,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 978 state to replica [ns_server:info,2014-08-19T16:50:05.458,ns_1@10.242.238.90:<0.21511.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 978 to state replica [ns_server:debug,2014-08-19T16:50:05.472,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 725. Nacking mccouch update. [views:debug,2014-08-19T16:50:05.472,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/725. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:05.472,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",725,pending,0} [ns_server:debug,2014-08-19T16:50:05.473,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,1016,987,756,740,1019,1003,990,759,743,727,1022,1006,993,762, 746,730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755,739,1018, 1002,989,758,742,726,1021,1005,992,761,745,729,1008,995,979,764,748,732,1011, 998,982,767,751,735,1014,985,754,738,1017,1001,988,757,741,725,1020,1004,991, 760,744,728,1023,1007,994,763,747,731,1010,981,766,734,1013,753,1000] [ns_server:debug,2014-08-19T16:50:05.490,ns_1@10.242.238.90:<0.21511.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_978_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:05.492,ns_1@10.242.238.90:<0.21511.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[978]}, {checkpoints,[{978,0}]}, {name,<<"replication_building_978_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[978]}, {takeover,false}, {suffix,"building_978_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",978,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:05.492,ns_1@10.242.238.90:<0.21511.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21512.0> [rebalance:debug,2014-08-19T16:50:05.493,ns_1@10.242.238.90:<0.21511.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:05.493,ns_1@10.242.238.90:<0.21511.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1440.1>,#Ref<16550.0.1.87319>}]} [rebalance:info,2014-08-19T16:50:05.493,ns_1@10.242.238.90:<0.21511.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 978 [rebalance:debug,2014-08-19T16:50:05.494,ns_1@10.242.238.90:<0.21511.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1440.1>,#Ref<16550.0.1.87319>}] 
[ns_server:debug,2014-08-19T16:50:05.495,ns_1@10.242.238.90:<0.21511.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:05.512,ns_1@10.242.238.90:<0.21513.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 978 [views:debug,2014-08-19T16:50:05.513,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/725. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:05.514,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",725,pending,0} [rebalance:debug,2014-08-19T16:50:05.515,ns_1@10.242.238.90:<0.21494.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:05.515,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21494.0> (ok) [ns_server:info,2014-08-19T16:50:05.518,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 724 state to replica [ns_server:info,2014-08-19T16:50:05.524,ns_1@10.242.238.90:<0.21516.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 724 to state replica [ns_server:debug,2014-08-19T16:50:05.568,ns_1@10.242.238.90:<0.21516.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_724_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:05.570,ns_1@10.242.238.90:<0.21516.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[724]}, {checkpoints,[{724,0}]}, {name,<<"replication_building_724_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[724]}, {takeover,false}, {suffix,"building_724_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",724,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:05.571,ns_1@10.242.238.90:<0.21516.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21517.0> [rebalance:debug,2014-08-19T16:50:05.571,ns_1@10.242.238.90:<0.21516.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:05.571,ns_1@10.242.238.90:<0.21516.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1460.1>,#Ref<16550.0.1.87436>}]} [rebalance:info,2014-08-19T16:50:05.572,ns_1@10.242.238.90:<0.21516.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 724 [rebalance:debug,2014-08-19T16:50:05.572,ns_1@10.242.238.90:<0.21516.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1460.1>,#Ref<16550.0.1.87436>}] [ns_server:debug,2014-08-19T16:50:05.573,ns_1@10.242.238.90:<0.21516.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:05.573,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21518.0> (ok) [rebalance:debug,2014-08-19T16:50:05.574,ns_1@10.242.238.90:<0.21519.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 724 [ns_server:info,2014-08-19T16:50:05.642,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 977 state to replica [ns_server:info,2014-08-19T16:50:05.646,ns_1@10.242.238.90:<0.21536.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} 
vbucket 977 to state replica [ns_server:debug,2014-08-19T16:50:05.647,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 978. Nacking mccouch update. [views:debug,2014-08-19T16:50:05.647,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/978. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:05.647,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",978,replica,0} [ns_server:debug,2014-08-19T16:50:05.647,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,1016,987,756,740,1019,1003,990,759,743,727,1022,1006,993,762, 746,730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755,739,1018, 1002,989,758,742,726,1021,1005,992,761,745,729,1008,995,979,764,748,732,1011, 998,982,767,751,735,1014,985,754,738,1017,1001,988,757,741,725,1020,1004,991, 760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734,1013,753,1000] [ns_server:debug,2014-08-19T16:50:05.677,ns_1@10.242.238.90:<0.21536.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_977_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:05.679,ns_1@10.242.238.90:<0.21536.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[977]}, {checkpoints,[{977,0}]}, {name,<<"replication_building_977_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[977]}, {takeover,false}, {suffix,"building_977_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",977,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:05.679,ns_1@10.242.238.90:<0.21536.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21537.0> [rebalance:debug,2014-08-19T16:50:05.679,ns_1@10.242.238.90:<0.21536.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:05.680,ns_1@10.242.238.90:<0.21536.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1517.1>,#Ref<16550.0.1.87684>}]} [rebalance:info,2014-08-19T16:50:05.680,ns_1@10.242.238.90:<0.21536.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 977 [rebalance:debug,2014-08-19T16:50:05.680,ns_1@10.242.238.90:<0.21536.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1517.1>,#Ref<16550.0.1.87684>}] [ns_server:debug,2014-08-19T16:50:05.681,ns_1@10.242.238.90:<0.21536.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:05.700,ns_1@10.242.238.90:<0.21538.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 977 [ns_server:info,2014-08-19T16:50:05.706,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 723 state to replica [ns_server:info,2014-08-19T16:50:05.712,ns_1@10.242.238.90:<0.21541.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 723 to state replica [views:debug,2014-08-19T16:50:05.714,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/978. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:05.715,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",978,replica,0} [ns_server:debug,2014-08-19T16:50:05.759,ns_1@10.242.238.90:<0.21541.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_723_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:05.760,ns_1@10.242.238.90:<0.21541.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[723]}, {checkpoints,[{723,0}]}, {name,<<"replication_building_723_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[723]}, {takeover,false}, {suffix,"building_723_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",723,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:05.760,ns_1@10.242.238.90:<0.21541.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21556.0> [rebalance:debug,2014-08-19T16:50:05.761,ns_1@10.242.238.90:<0.21541.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:05.761,ns_1@10.242.238.90:<0.21541.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1537.1>,#Ref<16550.0.1.87798>}]} [rebalance:info,2014-08-19T16:50:05.761,ns_1@10.242.238.90:<0.21541.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 723 [rebalance:debug,2014-08-19T16:50:05.761,ns_1@10.242.238.90:<0.21541.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1537.1>,#Ref<16550.0.1.87798>}] [ns_server:debug,2014-08-19T16:50:05.762,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21557.0> (ok) [ns_server:debug,2014-08-19T16:50:05.762,ns_1@10.242.238.90:<0.21541.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:05.763,ns_1@10.242.238.90:<0.21558.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 723 [ns_server:debug,2014-08-19T16:50:05.798,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 724. Nacking mccouch update. [views:debug,2014-08-19T16:50:05.798,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/724. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:05.798,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",724,pending,0} [ns_server:debug,2014-08-19T16:50:05.798,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,1016,987,756,740,724,1019,1003,990,759,743,727,1022,1006,993, 762,746,730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755,739, 1018,1002,989,758,742,726,1021,1005,992,761,745,729,1008,995,979,764,748,732, 1011,998,982,767,751,735,1014,985,754,738,1017,1001,988,757,741,725,1020, 1004,991,760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734,1013,753, 1000] [ns_server:info,2014-08-19T16:50:05.833,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 976 state to replica [ns_server:info,2014-08-19T16:50:05.845,ns_1@10.242.238.90:<0.21561.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 976 to state replica [views:debug,2014-08-19T16:50:05.865,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/724. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:05.865,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",724,pending,0} [ns_server:debug,2014-08-19T16:50:05.877,ns_1@10.242.238.90:<0.21561.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_976_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:05.878,ns_1@10.242.238.90:<0.21561.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[976]}, {checkpoints,[{976,0}]}, {name,<<"replication_building_976_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[976]}, {takeover,false}, {suffix,"building_976_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",976,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:05.879,ns_1@10.242.238.90:<0.21561.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21562.0> [rebalance:debug,2014-08-19T16:50:05.879,ns_1@10.242.238.90:<0.21561.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:05.879,ns_1@10.242.238.90:<0.21561.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1594.1>,#Ref<16550.0.1.88063>}]} [rebalance:info,2014-08-19T16:50:05.880,ns_1@10.242.238.90:<0.21561.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 976 [rebalance:debug,2014-08-19T16:50:05.880,ns_1@10.242.238.90:<0.21561.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1594.1>,#Ref<16550.0.1.88063>}] [ns_server:debug,2014-08-19T16:50:05.881,ns_1@10.242.238.90:<0.21561.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:05.901,ns_1@10.242.238.90:<0.21563.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 976 [ns_server:info,2014-08-19T16:50:05.908,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 722 state to replica [ns_server:info,2014-08-19T16:50:05.915,ns_1@10.242.238.90:<0.21566.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} 
vbucket 722 to state replica [ns_server:debug,2014-08-19T16:50:05.962,ns_1@10.242.238.90:<0.21566.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_722_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:05.963,ns_1@10.242.238.90:<0.21566.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[722]}, {checkpoints,[{722,0}]}, {name,<<"replication_building_722_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[722]}, {takeover,false}, {suffix,"building_722_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",722,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:05.964,ns_1@10.242.238.90:<0.21566.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21581.0> [rebalance:debug,2014-08-19T16:50:05.964,ns_1@10.242.238.90:<0.21566.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:05.964,ns_1@10.242.238.90:<0.21566.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1614.1>,#Ref<16550.0.1.88179>}]} [rebalance:info,2014-08-19T16:50:05.965,ns_1@10.242.238.90:<0.21566.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 722 [rebalance:debug,2014-08-19T16:50:05.965,ns_1@10.242.238.90:<0.21566.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1614.1>,#Ref<16550.0.1.88179>}] [ns_server:debug,2014-08-19T16:50:05.966,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21582.0> (ok) [ns_server:debug,2014-08-19T16:50:05.966,ns_1@10.242.238.90:<0.21566.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:05.967,ns_1@10.242.238.90:<0.21583.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 722 [ns_server:debug,2014-08-19T16:50:06.015,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 977. Nacking mccouch update. [views:debug,2014-08-19T16:50:06.016,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/977. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:06.016,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",977,replica,0} [ns_server:debug,2014-08-19T16:50:06.016,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,1016,756,724,1003,990,759,743,727,1022,1006,993,977,762,746, 730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755,739,1018,1002, 989,758,742,726,1021,1005,992,761,745,729,1008,995,979,764,748,732,1011,998, 982,767,751,735,1014,985,754,738,1017,1001,988,757,741,725,1020,1004,991,760, 744,728,1023,1007,994,978,763,747,731,1010,981,766,734,1013,753,1000,987,740, 1019] [ns_server:info,2014-08-19T16:50:06.034,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 975 state to replica [ns_server:info,2014-08-19T16:50:06.038,ns_1@10.242.238.90:<0.21586.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 975 to state replica [views:debug,2014-08-19T16:50:06.066,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/977. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:06.066,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",977,replica,0} [ns_server:debug,2014-08-19T16:50:06.070,ns_1@10.242.238.90:<0.21586.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_975_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:06.071,ns_1@10.242.238.90:<0.21586.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[975]}, {checkpoints,[{975,0}]}, {name,<<"replication_building_975_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[975]}, {takeover,false}, {suffix,"building_975_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",975,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:06.072,ns_1@10.242.238.90:<0.21586.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21587.0> [rebalance:debug,2014-08-19T16:50:06.072,ns_1@10.242.238.90:<0.21586.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:06.073,ns_1@10.242.238.90:<0.21586.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1671.1>,#Ref<16550.0.1.88441>}]} [rebalance:info,2014-08-19T16:50:06.073,ns_1@10.242.238.90:<0.21586.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 975 [rebalance:debug,2014-08-19T16:50:06.073,ns_1@10.242.238.90:<0.21586.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1671.1>,#Ref<16550.0.1.88441>}] [ns_server:debug,2014-08-19T16:50:06.074,ns_1@10.242.238.90:<0.21586.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:06.091,ns_1@10.242.238.90:<0.21588.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 975 [ns_server:info,2014-08-19T16:50:06.097,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 721 state to replica [ns_server:info,2014-08-19T16:50:06.103,ns_1@10.242.238.90:<0.21591.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} 
vbucket 721 to state replica [ns_server:debug,2014-08-19T16:50:06.149,ns_1@10.242.238.90:<0.21591.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_721_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:06.151,ns_1@10.242.238.90:<0.21591.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[721]}, {checkpoints,[{721,0}]}, {name,<<"replication_building_721_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[721]}, {takeover,false}, {suffix,"building_721_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",721,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:06.151,ns_1@10.242.238.90:<0.21591.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21606.0> [rebalance:debug,2014-08-19T16:50:06.152,ns_1@10.242.238.90:<0.21591.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:06.152,ns_1@10.242.238.90:<0.21591.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1691.1>,#Ref<16550.0.1.88551>}]} [rebalance:info,2014-08-19T16:50:06.152,ns_1@10.242.238.90:<0.21591.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 721 [rebalance:debug,2014-08-19T16:50:06.153,ns_1@10.242.238.90:<0.21591.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1691.1>,#Ref<16550.0.1.88551>}] [ns_server:debug,2014-08-19T16:50:06.154,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21607.0> (ok) [ns_server:debug,2014-08-19T16:50:06.154,ns_1@10.242.238.90:<0.21591.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:06.155,ns_1@10.242.238.90:<0.21608.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 721 [ns_server:debug,2014-08-19T16:50:06.208,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 723. Nacking mccouch update. [views:debug,2014-08-19T16:50:06.208,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/723. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:06.208,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",723,pending,0} [ns_server:debug,2014-08-19T16:50:06.208,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,1016,756,724,1003,990,759,743,727,1022,1006,993,977,762,746, 730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755,739,723,1018, 1002,989,758,742,726,1021,1005,992,761,745,729,1008,995,979,764,748,732,1011, 998,982,767,751,735,1014,985,754,738,1017,1001,988,757,741,725,1020,1004,991, 760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734,1013,753,1000,987, 740,1019] [ns_server:info,2014-08-19T16:50:06.229,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 974 state to replica [ns_server:info,2014-08-19T16:50:06.233,ns_1@10.242.238.90:<0.21611.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 974 to state replica [views:debug,2014-08-19T16:50:06.258,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/723. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:06.258,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",723,pending,0} [rebalance:debug,2014-08-19T16:50:06.259,ns_1@10.242.238.90:<0.21482.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:06.259,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21482.0> (ok) [ns_server:debug,2014-08-19T16:50:06.266,ns_1@10.242.238.90:<0.21611.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_974_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:06.267,ns_1@10.242.238.90:<0.21611.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[974]}, {checkpoints,[{974,0}]}, {name,<<"replication_building_974_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[974]}, {takeover,false}, {suffix,"building_974_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",974,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:06.268,ns_1@10.242.238.90:<0.21611.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21612.0> [rebalance:debug,2014-08-19T16:50:06.268,ns_1@10.242.238.90:<0.21611.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:06.269,ns_1@10.242.238.90:<0.21611.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1748.1>,#Ref<16550.0.1.88834>}]} [rebalance:info,2014-08-19T16:50:06.269,ns_1@10.242.238.90:<0.21611.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 974 [rebalance:debug,2014-08-19T16:50:06.269,ns_1@10.242.238.90:<0.21611.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1748.1>,#Ref<16550.0.1.88834>}] [ns_server:debug,2014-08-19T16:50:06.270,ns_1@10.242.238.90:<0.21611.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:06.287,ns_1@10.242.238.90:<0.21613.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in 
vbucket 974 [ns_server:info,2014-08-19T16:50:06.294,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 720 state to replica [ns_server:info,2014-08-19T16:50:06.299,ns_1@10.242.238.90:<0.21616.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 720 to state replica [ns_server:debug,2014-08-19T16:50:06.344,ns_1@10.242.238.90:<0.21616.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_720_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:06.345,ns_1@10.242.238.90:<0.21616.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[720]}, {checkpoints,[{720,0}]}, {name,<<"replication_building_720_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[720]}, {takeover,false}, {suffix,"building_720_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",720,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:06.346,ns_1@10.242.238.90:<0.21616.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21631.0> [rebalance:debug,2014-08-19T16:50:06.346,ns_1@10.242.238.90:<0.21616.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:06.347,ns_1@10.242.238.90:<0.21616.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1776.1>,#Ref<16550.0.1.89028>}]} [rebalance:info,2014-08-19T16:50:06.347,ns_1@10.242.238.90:<0.21616.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 720 [rebalance:debug,2014-08-19T16:50:06.347,ns_1@10.242.238.90:<0.21616.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1776.1>,#Ref<16550.0.1.89028>}] [ns_server:debug,2014-08-19T16:50:06.348,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21632.0> (ok) [ns_server:debug,2014-08-19T16:50:06.348,ns_1@10.242.238.90:<0.21616.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:06.349,ns_1@10.242.238.90:<0.21633.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 720 [ns_server:debug,2014-08-19T16:50:06.398,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 976. Nacking mccouch update. [views:debug,2014-08-19T16:50:06.398,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/976. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:06.398,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",976,replica,0} [ns_server:debug,2014-08-19T16:50:06.398,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,1016,756,724,1003,990,759,743,727,1022,1006,993,977,762,746, 730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755,739,723,1018, 1002,989,758,742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732, 1011,998,982,767,751,735,1014,985,754,738,1017,1001,988,757,741,725,1020, 1004,991,760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734,1013,753, 1000,987,740,1019] [ns_server:info,2014-08-19T16:50:06.421,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 973 state to replica [ns_server:info,2014-08-19T16:50:06.425,ns_1@10.242.238.90:<0.21636.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 973 to state replica [views:debug,2014-08-19T16:50:06.432,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/976. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:06.432,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",976,replica,0} [ns_server:debug,2014-08-19T16:50:06.458,ns_1@10.242.238.90:<0.21636.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_973_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:06.459,ns_1@10.242.238.90:<0.21636.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[973]}, {checkpoints,[{973,0}]}, {name,<<"replication_building_973_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[973]}, {takeover,false}, {suffix,"building_973_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",973,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:06.460,ns_1@10.242.238.90:<0.21636.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21638.0> [rebalance:debug,2014-08-19T16:50:06.460,ns_1@10.242.238.90:<0.21636.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:06.461,ns_1@10.242.238.90:<0.21636.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1835.1>,#Ref<16550.0.1.89327>}]} [rebalance:info,2014-08-19T16:50:06.461,ns_1@10.242.238.90:<0.21636.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 973 [rebalance:debug,2014-08-19T16:50:06.461,ns_1@10.242.238.90:<0.21636.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1835.1>,#Ref<16550.0.1.89327>}] [ns_server:debug,2014-08-19T16:50:06.462,ns_1@10.242.238.90:<0.21636.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:06.480,ns_1@10.242.238.90:<0.21653.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 973 [ns_server:info,2014-08-19T16:50:06.485,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 719 state to replica [ns_server:info,2014-08-19T16:50:06.490,ns_1@10.242.238.90:<0.21656.0>:ebucketmigrator_srv:init:544]Setting 
{"10.242.238.90",11209} vbucket 719 to state replica [ns_server:debug,2014-08-19T16:50:06.507,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 974. Nacking mccouch update. [views:debug,2014-08-19T16:50:06.507,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/974. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:06.507,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",974,replica,0} [ns_server:debug,2014-08-19T16:50:06.508,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,1016,756,724,1003,990,974,759,743,727,1022,1006,993,977,762, 746,730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755,739,723, 1018,1002,989,758,742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748, 732,1011,998,982,767,751,735,1014,985,754,738,1017,1001,988,757,741,725,1020, 1004,991,760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734,1013,753, 1000,987,740,1019] [ns_server:debug,2014-08-19T16:50:06.536,ns_1@10.242.238.90:<0.21656.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_719_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:06.537,ns_1@10.242.238.90:<0.21656.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[719]}, {checkpoints,[{719,0}]}, {name,<<"replication_building_719_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[719]}, {takeover,false}, {suffix,"building_719_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",719,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:06.538,ns_1@10.242.238.90:<0.21656.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21657.0> [rebalance:debug,2014-08-19T16:50:06.538,ns_1@10.242.238.90:<0.21656.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:06.539,ns_1@10.242.238.90:<0.21656.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1869.1>,#Ref<16550.0.1.89498>}]} [rebalance:info,2014-08-19T16:50:06.539,ns_1@10.242.238.90:<0.21656.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 719 [rebalance:debug,2014-08-19T16:50:06.539,ns_1@10.242.238.90:<0.21656.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1869.1>,#Ref<16550.0.1.89498>}] [ns_server:debug,2014-08-19T16:50:06.540,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21658.0> (ok) [ns_server:debug,2014-08-19T16:50:06.540,ns_1@10.242.238.90:<0.21656.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:06.541,ns_1@10.242.238.90:<0.21659.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 719 [views:debug,2014-08-19T16:50:06.558,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/974. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:06.558,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",974,replica,0} [ns_server:info,2014-08-19T16:50:06.609,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 972 state to replica [ns_server:info,2014-08-19T16:50:06.613,ns_1@10.242.238.90:<0.21676.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 972 to state replica [ns_server:debug,2014-08-19T16:50:06.645,ns_1@10.242.238.90:<0.21676.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_972_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:06.646,ns_1@10.242.238.90:<0.21676.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[972]}, {checkpoints,[{972,0}]}, {name,<<"replication_building_972_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[972]}, {takeover,false}, {suffix,"building_972_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",972,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:06.647,ns_1@10.242.238.90:<0.21676.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21677.0> [rebalance:debug,2014-08-19T16:50:06.647,ns_1@10.242.238.90:<0.21676.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:06.648,ns_1@10.242.238.90:<0.21676.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1927.1>,#Ref<16550.0.1.89767>}]} [rebalance:info,2014-08-19T16:50:06.648,ns_1@10.242.238.90:<0.21676.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 972 [rebalance:debug,2014-08-19T16:50:06.648,ns_1@10.242.238.90:<0.21676.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1927.1>,#Ref<16550.0.1.89767>}] [ns_server:debug,2014-08-19T16:50:06.649,ns_1@10.242.238.90:<0.21676.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:06.658,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 722. Nacking mccouch update. [views:debug,2014-08-19T16:50:06.658,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/722. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:06.658,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",722,pending,0} [ns_server:debug,2014-08-19T16:50:06.658,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,1016,756,724,1003,990,974,759,743,727,1022,1006,993,977,762, 746,730,1009,996,980,765,749,733,1012,999,983,752,736,1015,986,755,739,723, 1018,1002,989,758,742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748, 732,1011,998,982,767,751,735,1014,985,754,738,722,1017,1001,988,757,741,725, 1020,1004,991,760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734, 1013,753,1000,987,740,1019] [rebalance:debug,2014-08-19T16:50:06.669,ns_1@10.242.238.90:<0.21678.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 972 [ns_server:info,2014-08-19T16:50:06.675,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 718 state to replica [ns_server:info,2014-08-19T16:50:06.682,ns_1@10.242.238.90:<0.21681.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 718 to state replica [ns_server:debug,2014-08-19T16:50:06.727,ns_1@10.242.238.90:<0.21681.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_718_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:06.728,ns_1@10.242.238.90:<0.21681.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[718]}, {checkpoints,[{718,0}]}, {name,<<"replication_building_718_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[718]}, {takeover,false}, {suffix,"building_718_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",718,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:06.729,ns_1@10.242.238.90:<0.21681.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21682.0> [rebalance:debug,2014-08-19T16:50:06.729,ns_1@10.242.238.90:<0.21681.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:06.730,ns_1@10.242.238.90:<0.21681.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1947.1>,#Ref<16550.0.1.89909>}]} [rebalance:info,2014-08-19T16:50:06.730,ns_1@10.242.238.90:<0.21681.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 718 [rebalance:debug,2014-08-19T16:50:06.730,ns_1@10.242.238.90:<0.21681.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1947.1>,#Ref<16550.0.1.89909>}] [ns_server:debug,2014-08-19T16:50:06.731,ns_1@10.242.238.90:<0.21681.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:06.731,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21683.0> (ok) [rebalance:debug,2014-08-19T16:50:06.732,ns_1@10.242.238.90:<0.21684.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 718 [views:debug,2014-08-19T16:50:06.733,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/722. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:06.733,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",722,pending,0} [ns_server:info,2014-08-19T16:50:06.800,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 971 state to replica [ns_server:info,2014-08-19T16:50:06.804,ns_1@10.242.238.90:<0.21701.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 971 to state replica [ns_server:debug,2014-08-19T16:50:06.834,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 972. Nacking mccouch update. [views:debug,2014-08-19T16:50:06.834,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/972. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:06.834,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",972,replica,0} [ns_server:debug,2014-08-19T16:50:06.834,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,1016,756,724,1003,990,743,1022,993,977,762,746,730,1009,996, 980,765,749,733,1012,999,983,752,736,1015,986,755,739,723,1018,1002,989,758, 742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732,1011,998,982, 767,751,735,1014,985,754,738,722,1017,1001,988,972,757,741,725,1020,1004,991, 760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734,1013,753,1000,987, 740,1019,974,759,727,1006] [ns_server:debug,2014-08-19T16:50:06.836,ns_1@10.242.238.90:<0.21701.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_971_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:06.837,ns_1@10.242.238.90:<0.21701.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[971]}, {checkpoints,[{971,0}]}, {name,<<"replication_building_971_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[971]}, {takeover,false}, {suffix,"building_971_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",971,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:06.838,ns_1@10.242.238.90:<0.21701.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21702.0> [rebalance:debug,2014-08-19T16:50:06.838,ns_1@10.242.238.90:<0.21701.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:06.839,ns_1@10.242.238.90:<0.21701.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2008.1>,#Ref<16550.0.1.90209>}]} [rebalance:info,2014-08-19T16:50:06.839,ns_1@10.242.238.90:<0.21701.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 971 [rebalance:debug,2014-08-19T16:50:06.840,ns_1@10.242.238.90:<0.21701.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2008.1>,#Ref<16550.0.1.90209>}] [ns_server:debug,2014-08-19T16:50:06.840,ns_1@10.242.238.90:<0.21701.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:06.858,ns_1@10.242.238.90:<0.21703.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 971 
[ns_server:info,2014-08-19T16:50:06.864,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 717 state to replica [ns_server:info,2014-08-19T16:50:06.870,ns_1@10.242.238.90:<0.21706.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 717 to state replica [views:debug,2014-08-19T16:50:06.892,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/972. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:06.893,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",972,replica,0} [ns_server:debug,2014-08-19T16:50:06.916,ns_1@10.242.238.90:<0.21706.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_717_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:06.917,ns_1@10.242.238.90:<0.21706.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[717]}, {checkpoints,[{717,0}]}, {name,<<"replication_building_717_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[717]}, {takeover,false}, {suffix,"building_717_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",717,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:06.918,ns_1@10.242.238.90:<0.21706.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21707.0> [rebalance:debug,2014-08-19T16:50:06.918,ns_1@10.242.238.90:<0.21706.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:06.919,ns_1@10.242.238.90:<0.21706.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2028.1>,#Ref<16550.0.1.90325>}]} [rebalance:info,2014-08-19T16:50:06.919,ns_1@10.242.238.90:<0.21706.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 717 [rebalance:debug,2014-08-19T16:50:06.919,ns_1@10.242.238.90:<0.21706.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2028.1>,#Ref<16550.0.1.90325>}] [ns_server:debug,2014-08-19T16:50:06.920,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21708.0> (ok) [ns_server:debug,2014-08-19T16:50:06.920,ns_1@10.242.238.90:<0.21706.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:06.922,ns_1@10.242.238.90:<0.21709.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 717 [ns_server:info,2014-08-19T16:50:06.997,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 970 state to replica [ns_server:info,2014-08-19T16:50:07.001,ns_1@10.242.238.90:<0.21726.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 970 to state replica [ns_server:debug,2014-08-19T16:50:07.032,ns_1@10.242.238.90:<0.21726.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_970_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:07.034,ns_1@10.242.238.90:<0.21726.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[970]}, {checkpoints,[{970,0}]}, {name,<<"replication_building_970_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[970]}, {takeover,false}, {suffix,"building_970_'ns_1@10.242.238.90'"}, 
{note_tap_stats,{replica_building,"default",970,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:07.034,ns_1@10.242.238.90:<0.21726.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21727.0> [rebalance:debug,2014-08-19T16:50:07.034,ns_1@10.242.238.90:<0.21726.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:07.035,ns_1@10.242.238.90:<0.21726.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2085.1>,#Ref<16550.0.1.90612>}]} [rebalance:info,2014-08-19T16:50:07.035,ns_1@10.242.238.90:<0.21726.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 970 [rebalance:debug,2014-08-19T16:50:07.036,ns_1@10.242.238.90:<0.21726.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2085.1>,#Ref<16550.0.1.90612>}] [ns_server:debug,2014-08-19T16:50:07.037,ns_1@10.242.238.90:<0.21726.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:07.042,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 720. Nacking mccouch update. [views:debug,2014-08-19T16:50:07.042,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/720. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:07.043,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",720,pending,0} [ns_server:debug,2014-08-19T16:50:07.043,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,1016,756,724,1003,990,743,1022,993,977,762,746,730,1009,996, 980,765,749,733,1012,999,983,752,736,720,1015,986,755,739,723,1018,1002,989, 758,742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732,1011,998, 982,767,751,735,1014,985,754,738,722,1017,1001,988,972,757,741,725,1020,1004, 991,760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734,1013,753,1000, 987,740,1019,974,759,727,1006] [rebalance:debug,2014-08-19T16:50:07.053,ns_1@10.242.238.90:<0.21728.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 970 [ns_server:info,2014-08-19T16:50:07.059,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 716 state to replica [ns_server:info,2014-08-19T16:50:07.065,ns_1@10.242.238.90:<0.21731.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 716 to state replica [views:debug,2014-08-19T16:50:07.110,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/720. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:07.110,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",720,pending,0} [ns_server:debug,2014-08-19T16:50:07.112,ns_1@10.242.238.90:<0.21731.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_716_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:07.113,ns_1@10.242.238.90:<0.21731.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[716]}, {checkpoints,[{716,0}]}, {name,<<"replication_building_716_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[716]}, {takeover,false}, {suffix,"building_716_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",716,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:07.114,ns_1@10.242.238.90:<0.21731.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21732.0> [rebalance:debug,2014-08-19T16:50:07.114,ns_1@10.242.238.90:<0.21731.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:07.114,ns_1@10.242.238.90:<0.21731.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2119.1>,#Ref<16550.0.1.90754>}]} [rebalance:info,2014-08-19T16:50:07.114,ns_1@10.242.238.90:<0.21731.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 716 [rebalance:debug,2014-08-19T16:50:07.115,ns_1@10.242.238.90:<0.21731.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2119.1>,#Ref<16550.0.1.90754>}] [ns_server:debug,2014-08-19T16:50:07.115,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21733.0> (ok) [ns_server:debug,2014-08-19T16:50:07.115,ns_1@10.242.238.90:<0.21731.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:07.116,ns_1@10.242.238.90:<0.21734.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 716 [ns_server:info,2014-08-19T16:50:07.187,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 969 state to replica [ns_server:info,2014-08-19T16:50:07.192,ns_1@10.242.238.90:<0.21751.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 969 to state replica [ns_server:debug,2014-08-19T16:50:07.223,ns_1@10.242.238.90:<0.21751.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_969_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:07.225,ns_1@10.242.238.90:<0.21751.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[969]}, {checkpoints,[{969,0}]}, {name,<<"replication_building_969_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[969]}, {takeover,false}, {suffix,"building_969_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",969,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:07.226,ns_1@10.242.238.90:<0.21751.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21752.0> [rebalance:debug,2014-08-19T16:50:07.226,ns_1@10.242.238.90:<0.21751.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:50:07.226,ns_1@10.242.238.90:<0.21751.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2167.1>,#Ref<16550.0.1.91008>}]} [rebalance:info,2014-08-19T16:50:07.226,ns_1@10.242.238.90:<0.21751.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 969 [rebalance:debug,2014-08-19T16:50:07.227,ns_1@10.242.238.90:<0.21751.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2167.1>,#Ref<16550.0.1.91008>}] [ns_server:debug,2014-08-19T16:50:07.227,ns_1@10.242.238.90:<0.21751.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:07.248,ns_1@10.242.238.90:<0.21753.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 969 [ns_server:debug,2014-08-19T16:50:07.251,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 718. Nacking mccouch update. [views:debug,2014-08-19T16:50:07.251,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/718. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:07.251,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",718,pending,0} [ns_server:debug,2014-08-19T16:50:07.252,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,718,984,737,1016,756,724,1003,990,743,1022,993,977,762,746,730,1009, 996,980,765,749,733,1012,999,983,752,736,720,1015,986,755,739,723,1018,1002, 989,758,742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732,1011, 998,982,767,751,735,1014,985,754,738,722,1017,1001,988,972,757,741,725,1020, 1004,991,760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734,1013,753, 1000,987,740,1019,974,759,727,1006] [ns_server:info,2014-08-19T16:50:07.254,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 715 state to replica [ns_server:info,2014-08-19T16:50:07.260,ns_1@10.242.238.90:<0.21756.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 715 to state replica [ns_server:debug,2014-08-19T16:50:07.308,ns_1@10.242.238.90:<0.21756.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_715_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:07.309,ns_1@10.242.238.90:<0.21756.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[715]}, {checkpoints,[{715,0}]}, {name,<<"replication_building_715_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[715]}, {takeover,false}, {suffix,"building_715_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",715,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:07.310,ns_1@10.242.238.90:<0.21756.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21757.0> [rebalance:debug,2014-08-19T16:50:07.310,ns_1@10.242.238.90:<0.21756.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:07.311,ns_1@10.242.238.90:<0.21756.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2201.1>,#Ref<16550.0.1.91174>}]} 
[rebalance:info,2014-08-19T16:50:07.311,ns_1@10.242.238.90:<0.21756.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 715 [rebalance:debug,2014-08-19T16:50:07.311,ns_1@10.242.238.90:<0.21756.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2201.1>,#Ref<16550.0.1.91174>}] [ns_server:debug,2014-08-19T16:50:07.312,ns_1@10.242.238.90:<0.21756.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:07.312,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21758.0> (ok) [rebalance:debug,2014-08-19T16:50:07.314,ns_1@10.242.238.90:<0.21759.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 715 [views:debug,2014-08-19T16:50:07.327,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/718. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:07.327,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",718,pending,0} [ns_server:info,2014-08-19T16:50:07.383,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 968 state to replica [ns_server:info,2014-08-19T16:50:07.387,ns_1@10.242.238.90:<0.21768.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 968 to state replica [ns_server:debug,2014-08-19T16:50:07.418,ns_1@10.242.238.90:<0.21768.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_968_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:07.420,ns_1@10.242.238.90:<0.21768.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[968]}, {checkpoints,[{968,0}]}, {name,<<"replication_building_968_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[968]}, {takeover,false}, {suffix,"building_968_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",968,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:07.421,ns_1@10.242.238.90:<0.21768.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21769.0> [rebalance:debug,2014-08-19T16:50:07.421,ns_1@10.242.238.90:<0.21768.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:07.421,ns_1@10.242.238.90:<0.21768.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2258.1>,#Ref<16550.0.1.91461>}]} [rebalance:info,2014-08-19T16:50:07.422,ns_1@10.242.238.90:<0.21768.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 968 [rebalance:debug,2014-08-19T16:50:07.422,ns_1@10.242.238.90:<0.21768.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2258.1>,#Ref<16550.0.1.91461>}] [ns_server:debug,2014-08-19T16:50:07.423,ns_1@10.242.238.90:<0.21768.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:07.440,ns_1@10.242.238.90:<0.21770.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 968 [ns_server:info,2014-08-19T16:50:07.446,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 714 state to replica 
[ns_server:info,2014-08-19T16:50:07.453,ns_1@10.242.238.90:<0.21787.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 714 to state replica [ns_server:debug,2014-08-19T16:50:07.477,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 975. Nacking mccouch update. [views:debug,2014-08-19T16:50:07.477,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/975. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:07.477,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",975,replica,0} [ns_server:debug,2014-08-19T16:50:07.478,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,718,984,737,1016,756,724,1003,990,743,1022,993,977,762,746,730,1009, 996,980,765,749,733,1012,999,983,752,736,720,1015,986,755,739,723,1018,1002, 989,758,742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732,1011, 998,982,767,751,735,1014,985,754,738,722,1017,1001,988,972,757,741,725,1020, 1004,991,975,760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734,1013, 753,1000,987,740,1019,974,759,727,1006] [ns_server:debug,2014-08-19T16:50:07.500,ns_1@10.242.238.90:<0.21787.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_714_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:07.502,ns_1@10.242.238.90:<0.21787.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[714]}, {checkpoints,[{714,0}]}, {name,<<"replication_building_714_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[714]}, {takeover,false}, {suffix,"building_714_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",714,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:07.502,ns_1@10.242.238.90:<0.21787.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21788.0> [rebalance:debug,2014-08-19T16:50:07.502,ns_1@10.242.238.90:<0.21787.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:07.503,ns_1@10.242.238.90:<0.21787.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2278.1>,#Ref<16550.0.1.91578>}]} [rebalance:info,2014-08-19T16:50:07.503,ns_1@10.242.238.90:<0.21787.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 714 [rebalance:debug,2014-08-19T16:50:07.503,ns_1@10.242.238.90:<0.21787.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2278.1>,#Ref<16550.0.1.91578>}] [ns_server:debug,2014-08-19T16:50:07.504,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21789.0> (ok) [ns_server:debug,2014-08-19T16:50:07.504,ns_1@10.242.238.90:<0.21787.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:07.505,ns_1@10.242.238.90:<0.21790.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 714 [views:debug,2014-08-19T16:50:07.536,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/975. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:07.536,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",975,replica,0} [ns_server:info,2014-08-19T16:50:07.574,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 967 state to replica [ns_server:info,2014-08-19T16:50:07.578,ns_1@10.242.238.90:<0.21793.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 967 to state replica [ns_server:debug,2014-08-19T16:50:07.610,ns_1@10.242.238.90:<0.21793.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_967_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:07.611,ns_1@10.242.238.90:<0.21793.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[967]}, {checkpoints,[{967,0}]}, {name,<<"replication_building_967_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[967]}, {takeover,false}, {suffix,"building_967_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",967,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:07.612,ns_1@10.242.238.90:<0.21793.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21799.0> [rebalance:debug,2014-08-19T16:50:07.612,ns_1@10.242.238.90:<0.21793.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:07.613,ns_1@10.242.238.90:<0.21793.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2335.1>,#Ref<16550.0.1.92843>}]} [rebalance:info,2014-08-19T16:50:07.613,ns_1@10.242.238.90:<0.21793.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 967 [rebalance:debug,2014-08-19T16:50:07.614,ns_1@10.242.238.90:<0.21793.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2335.1>,#Ref<16550.0.1.92843>}] [ns_server:debug,2014-08-19T16:50:07.615,ns_1@10.242.238.90:<0.21793.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:07.633,ns_1@10.242.238.90:<0.21809.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 967 [ns_server:info,2014-08-19T16:50:07.640,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 713 state to replica [ns_server:info,2014-08-19T16:50:07.646,ns_1@10.242.238.90:<0.21812.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 713 to state replica [ns_server:debug,2014-08-19T16:50:07.691,ns_1@10.242.238.90:<0.21812.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_713_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:07.692,ns_1@10.242.238.90:<0.21812.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[713]}, {checkpoints,[{713,0}]}, {name,<<"replication_building_713_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[713]}, {takeover,false}, {suffix,"building_713_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",713,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:07.693,ns_1@10.242.238.90:<0.21812.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21813.0> 
[rebalance:debug,2014-08-19T16:50:07.693,ns_1@10.242.238.90:<0.21812.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:07.694,ns_1@10.242.238.90:<0.21812.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2355.1>,#Ref<16550.0.1.92960>}]} [rebalance:info,2014-08-19T16:50:07.694,ns_1@10.242.238.90:<0.21812.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 713 [rebalance:debug,2014-08-19T16:50:07.694,ns_1@10.242.238.90:<0.21812.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2355.1>,#Ref<16550.0.1.92960>}] [ns_server:debug,2014-08-19T16:50:07.695,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 973. Nacking mccouch update. [views:debug,2014-08-19T16:50:07.695,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/973. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:07.695,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",973,replica,0} [ns_server:debug,2014-08-19T16:50:07.695,ns_1@10.242.238.90:<0.21812.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:07.695,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,718,984,737,1016,756,724,1003,990,743,1022,993,977,762,746,730,1009, 996,980,765,749,733,1012,999,983,752,736,720,1015,986,755,739,723,1018,1002, 989,973,758,742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732, 1011,998,982,767,751,735,1014,985,754,738,722,1017,1001,988,972,757,741,725, 1020,1004,991,975,760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734, 1013,753,1000,987,740,1019,974,759,727,1006] [ns_server:debug,2014-08-19T16:50:07.696,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21814.0> (ok) [rebalance:debug,2014-08-19T16:50:07.698,ns_1@10.242.238.90:<0.21815.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 713 [views:debug,2014-08-19T16:50:07.755,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/973. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:07.755,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",973,replica,0} [ns_server:info,2014-08-19T16:50:07.766,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 966 state to replica [ns_server:info,2014-08-19T16:50:07.771,ns_1@10.242.238.90:<0.21819.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 966 to state replica [ns_server:debug,2014-08-19T16:50:07.803,ns_1@10.242.238.90:<0.21819.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_966_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:07.805,ns_1@10.242.238.90:<0.21819.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[966]}, {checkpoints,[{966,0}]}, {name,<<"replication_building_966_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[966]}, {takeover,false}, {suffix,"building_966_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",966,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:07.805,ns_1@10.242.238.90:<0.21819.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21820.0> [rebalance:debug,2014-08-19T16:50:07.805,ns_1@10.242.238.90:<0.21819.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:07.806,ns_1@10.242.238.90:<0.21819.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2412.1>,#Ref<16550.0.1.93221>}]} [rebalance:info,2014-08-19T16:50:07.806,ns_1@10.242.238.90:<0.21819.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 966 [rebalance:debug,2014-08-19T16:50:07.807,ns_1@10.242.238.90:<0.21819.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2412.1>,#Ref<16550.0.1.93221>}] [ns_server:debug,2014-08-19T16:50:07.807,ns_1@10.242.238.90:<0.21819.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:07.825,ns_1@10.242.238.90:<0.21821.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 966 [ns_server:info,2014-08-19T16:50:07.831,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 712 state to replica [ns_server:info,2014-08-19T16:50:07.837,ns_1@10.242.238.90:<0.21835.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 712 to state replica [ns_server:debug,2014-08-19T16:50:07.883,ns_1@10.242.238.90:<0.21835.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_712_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:07.885,ns_1@10.242.238.90:<0.21835.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[712]}, {checkpoints,[{712,0}]}, {name,<<"replication_building_712_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[712]}, {takeover,false}, {suffix,"building_712_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",712,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:07.886,ns_1@10.242.238.90:<0.21835.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21839.0> 
[rebalance:debug,2014-08-19T16:50:07.886,ns_1@10.242.238.90:<0.21835.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:07.886,ns_1@10.242.238.90:<0.21835.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2432.1>,#Ref<16550.0.1.93318>}]} [rebalance:info,2014-08-19T16:50:07.886,ns_1@10.242.238.90:<0.21835.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 712 [rebalance:debug,2014-08-19T16:50:07.887,ns_1@10.242.238.90:<0.21835.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2432.1>,#Ref<16550.0.1.93318>}] [ns_server:debug,2014-08-19T16:50:07.888,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21840.0> (ok) [ns_server:debug,2014-08-19T16:50:07.888,ns_1@10.242.238.90:<0.21835.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:07.889,ns_1@10.242.238.90:<0.21841.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 712 [ns_server:debug,2014-08-19T16:50:07.918,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 971. Nacking mccouch update. [views:debug,2014-08-19T16:50:07.918,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/971. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:07.918,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",971,replica,0} [ns_server:debug,2014-08-19T16:50:07.918,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,718,984,737,1016,971,756,724,1003,990,743,1022,977,762,730,1009,996, 980,765,749,733,1012,999,983,752,736,720,1015,986,755,739,723,1018,1002,989, 973,758,742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732,1011, 998,982,767,751,735,1014,985,754,738,722,1017,1001,988,972,757,741,725,1020, 1004,991,975,760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734,1013, 753,1000,987,740,1019,974,759,727,1006,993,746] [views:debug,2014-08-19T16:50:07.952,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/971. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:07.952,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",971,replica,0} [ns_server:info,2014-08-19T16:50:07.963,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 965 state to replica [ns_server:info,2014-08-19T16:50:07.967,ns_1@10.242.238.90:<0.21844.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 965 to state replica [ns_server:debug,2014-08-19T16:50:07.998,ns_1@10.242.238.90:<0.21844.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_965_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:08.000,ns_1@10.242.238.90:<0.21844.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[965]}, {checkpoints,[{965,0}]}, {name,<<"replication_building_965_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[965]}, {takeover,false}, {suffix,"building_965_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",965,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:08.001,ns_1@10.242.238.90:<0.21844.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21845.0> [rebalance:debug,2014-08-19T16:50:08.001,ns_1@10.242.238.90:<0.21844.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:08.001,ns_1@10.242.238.90:<0.21844.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2516.1>,#Ref<16550.0.1.94087>}]} [rebalance:info,2014-08-19T16:50:08.001,ns_1@10.242.238.90:<0.21844.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 965 [rebalance:debug,2014-08-19T16:50:08.002,ns_1@10.242.238.90:<0.21844.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2516.1>,#Ref<16550.0.1.94087>}] [ns_server:debug,2014-08-19T16:50:08.003,ns_1@10.242.238.90:<0.21844.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:08.035,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 969. Nacking mccouch update. [views:debug,2014-08-19T16:50:08.035,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/969. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:08.036,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",969,replica,0} [ns_server:debug,2014-08-19T16:50:08.036,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,718,984,737,1016,971,756,724,1003,990,743,1022,977,762,730,1009,996, 980,765,749,733,1012,999,983,752,736,720,1015,986,755,739,723,1018,1002,989, 973,758,742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732,1011, 998,982,767,751,735,1014,985,969,754,738,722,1017,1001,988,972,757,741,725, 1020,1004,991,975,760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734, 1013,753,1000,987,740,1019,974,759,727,1006,993,746] [rebalance:debug,2014-08-19T16:50:08.037,ns_1@10.242.238.90:<0.21860.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 965 [views:debug,2014-08-19T16:50:08.069,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/969. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:08.069,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",969,replica,0} [ns_server:info,2014-08-19T16:50:08.079,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 711 state to replica [ns_server:info,2014-08-19T16:50:08.085,ns_1@10.242.238.90:<0.21863.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 711 to state replica [ns_server:debug,2014-08-19T16:50:08.131,ns_1@10.242.238.90:<0.21863.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_711_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:08.132,ns_1@10.242.238.90:<0.21863.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[711]}, {checkpoints,[{711,0}]}, {name,<<"replication_building_711_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[711]}, {takeover,false}, {suffix,"building_711_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",711,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:08.132,ns_1@10.242.238.90:<0.21863.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21864.0> [rebalance:debug,2014-08-19T16:50:08.133,ns_1@10.242.238.90:<0.21863.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:08.133,ns_1@10.242.238.90:<0.21863.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2550.1>,#Ref<16550.0.1.94268>}]} [rebalance:info,2014-08-19T16:50:08.133,ns_1@10.242.238.90:<0.21863.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 711 [rebalance:debug,2014-08-19T16:50:08.134,ns_1@10.242.238.90:<0.21863.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2550.1>,#Ref<16550.0.1.94268>}] [ns_server:debug,2014-08-19T16:50:08.134,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21865.0> (ok) [ns_server:debug,2014-08-19T16:50:08.134,ns_1@10.242.238.90:<0.21863.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[rebalance:debug,2014-08-19T16:50:08.136,ns_1@10.242.238.90:<0.21866.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 711 [ns_server:info,2014-08-19T16:50:08.206,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 964 state to replica [ns_server:info,2014-08-19T16:50:08.211,ns_1@10.242.238.90:<0.21883.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 964 to state replica [ns_server:debug,2014-08-19T16:50:08.236,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 721. Nacking mccouch update. [views:debug,2014-08-19T16:50:08.236,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/721. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:08.237,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",721,pending,0} [ns_server:debug,2014-08-19T16:50:08.237,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,718,984,737,1016,971,756,724,1003,990,743,1022,977,762,730,1009,996, 980,765,749,733,1012,999,983,752,736,720,1015,986,755,739,723,1018,1002,989, 973,758,742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732,1011, 998,982,767,751,735,1014,985,969,754,738,722,1017,1001,988,972,757,741,725, 1020,1004,991,975,760,744,728,1023,1007,994,978,763,747,731,1010,981,766,734, 1013,753,721,1000,987,740,1019,974,759,727,1006,993,746] [ns_server:debug,2014-08-19T16:50:08.243,ns_1@10.242.238.90:<0.21883.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_964_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:08.244,ns_1@10.242.238.90:<0.21883.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[964]}, {checkpoints,[{964,0}]}, {name,<<"replication_building_964_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[964]}, {takeover,false}, {suffix,"building_964_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",964,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:08.245,ns_1@10.242.238.90:<0.21883.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21884.0> [rebalance:debug,2014-08-19T16:50:08.245,ns_1@10.242.238.90:<0.21883.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:08.245,ns_1@10.242.238.90:<0.21883.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2593.1>,#Ref<16550.0.1.94481>}]} [rebalance:info,2014-08-19T16:50:08.246,ns_1@10.242.238.90:<0.21883.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 964 [rebalance:debug,2014-08-19T16:50:08.246,ns_1@10.242.238.90:<0.21883.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2593.1>,#Ref<16550.0.1.94481>}] [ns_server:debug,2014-08-19T16:50:08.247,ns_1@10.242.238.90:<0.21883.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:08.263,ns_1@10.242.238.90:<0.21885.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 964 
[ns_server:info,2014-08-19T16:50:08.269,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 710 state to replica [views:debug,2014-08-19T16:50:08.270,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/721. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:08.270,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",721,pending,0} [ns_server:info,2014-08-19T16:50:08.275,ns_1@10.242.238.90:<0.21888.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 710 to state replica [ns_server:debug,2014-08-19T16:50:08.320,ns_1@10.242.238.90:<0.21888.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_710_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:08.321,ns_1@10.242.238.90:<0.21888.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[710]}, {checkpoints,[{710,0}]}, {name,<<"replication_building_710_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[710]}, {takeover,false}, {suffix,"building_710_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",710,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:08.322,ns_1@10.242.238.90:<0.21888.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21895.0> [rebalance:debug,2014-08-19T16:50:08.322,ns_1@10.242.238.90:<0.21888.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:08.323,ns_1@10.242.238.90:<0.21888.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2627.1>,#Ref<16550.0.1.94623>}]} [rebalance:info,2014-08-19T16:50:08.323,ns_1@10.242.238.90:<0.21888.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 710 [rebalance:debug,2014-08-19T16:50:08.323,ns_1@10.242.238.90:<0.21888.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2627.1>,#Ref<16550.0.1.94623>}] [ns_server:debug,2014-08-19T16:50:08.324,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21903.0> (ok) [ns_server:debug,2014-08-19T16:50:08.324,ns_1@10.242.238.90:<0.21888.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:08.325,ns_1@10.242.238.90:<0.21905.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 710 [ns_server:debug,2014-08-19T16:50:08.387,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 719. Nacking mccouch update. [views:debug,2014-08-19T16:50:08.387,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/719. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:08.387,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",719,pending,0} [ns_server:debug,2014-08-19T16:50:08.387,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,718,984,737,1016,971,756,724,1003,990,743,1022,977,762,730,1009,996, 980,765,749,733,1012,999,983,752,736,720,1015,986,755,739,723,1018,1002,989, 973,758,742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732,1011, 998,982,767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741, 725,1020,1004,991,975,760,744,728,1023,1007,994,978,763,747,731,1010,981,766, 734,1013,753,721,1000,987,740,1019,974,759,727,1006,993,746] [ns_server:info,2014-08-19T16:50:08.394,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 963 state to replica [ns_server:info,2014-08-19T16:50:08.398,ns_1@10.242.238.90:<0.21916.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 963 to state replica [ns_server:debug,2014-08-19T16:50:08.430,ns_1@10.242.238.90:<0.21916.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_963_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:08.432,ns_1@10.242.238.90:<0.21916.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[963]}, {checkpoints,[{963,0}]}, {name,<<"replication_building_963_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[963]}, {takeover,false}, {suffix,"building_963_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",963,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:08.433,ns_1@10.242.238.90:<0.21916.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21919.0> [rebalance:debug,2014-08-19T16:50:08.433,ns_1@10.242.238.90:<0.21916.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:08.434,ns_1@10.242.238.90:<0.21916.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2671.1>,#Ref<16550.0.1.94911>}]} [rebalance:info,2014-08-19T16:50:08.434,ns_1@10.242.238.90:<0.21916.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 963 [rebalance:debug,2014-08-19T16:50:08.434,ns_1@10.242.238.90:<0.21916.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2671.1>,#Ref<16550.0.1.94911>}] [ns_server:debug,2014-08-19T16:50:08.435,ns_1@10.242.238.90:<0.21916.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:50:08.438,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/719. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:08.438,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",719,pending,0} [rebalance:debug,2014-08-19T16:50:08.452,ns_1@10.242.238.90:<0.21920.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 963 [ns_server:info,2014-08-19T16:50:08.458,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 709 state to replica [ns_server:info,2014-08-19T16:50:08.464,ns_1@10.242.238.90:<0.21923.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 709 to state replica [ns_server:debug,2014-08-19T16:50:08.510,ns_1@10.242.238.90:<0.21923.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_709_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:08.511,ns_1@10.242.238.90:<0.21923.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[709]}, {checkpoints,[{709,0}]}, {name,<<"replication_building_709_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[709]}, {takeover,false}, {suffix,"building_709_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",709,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:08.512,ns_1@10.242.238.90:<0.21923.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21938.0> [rebalance:debug,2014-08-19T16:50:08.512,ns_1@10.242.238.90:<0.21923.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:08.513,ns_1@10.242.238.90:<0.21923.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2705.1>,#Ref<16550.0.1.95058>}]} [rebalance:info,2014-08-19T16:50:08.513,ns_1@10.242.238.90:<0.21923.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 709 [rebalance:debug,2014-08-19T16:50:08.513,ns_1@10.242.238.90:<0.21923.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2705.1>,#Ref<16550.0.1.95058>}] [ns_server:debug,2014-08-19T16:50:08.514,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21939.0> (ok) [ns_server:debug,2014-08-19T16:50:08.514,ns_1@10.242.238.90:<0.21923.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:08.515,ns_1@10.242.238.90:<0.21940.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 709 [ns_server:debug,2014-08-19T16:50:08.555,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 717. Nacking mccouch update. [views:debug,2014-08-19T16:50:08.555,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/717. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:08.555,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",717,pending,0} [ns_server:debug,2014-08-19T16:50:08.555,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,718,984,737,1016,971,756,724,1003,990,743,1022,977,762,730,1009,996, 980,765,749,733,717,1012,999,983,752,736,720,1015,986,755,739,723,1018,1002, 989,973,758,742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732, 1011,998,982,767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757, 741,725,1020,1004,991,975,760,744,728,1023,1007,994,978,763,747,731,1010,981, 766,734,1013,753,721,1000,987,740,1019,974,759,727,1006,993,746] [ns_server:info,2014-08-19T16:50:08.589,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 962 state to replica [ns_server:info,2014-08-19T16:50:08.593,ns_1@10.242.238.90:<0.21943.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 962 to state replica [views:debug,2014-08-19T16:50:08.622,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/717. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:08.622,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",717,pending,0} [ns_server:debug,2014-08-19T16:50:08.626,ns_1@10.242.238.90:<0.21943.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_962_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:08.628,ns_1@10.242.238.90:<0.21943.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[962]}, {checkpoints,[{962,0}]}, {name,<<"replication_building_962_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[962]}, {takeover,false}, {suffix,"building_962_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",962,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:08.628,ns_1@10.242.238.90:<0.21943.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21944.0> [rebalance:debug,2014-08-19T16:50:08.628,ns_1@10.242.238.90:<0.21943.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:08.629,ns_1@10.242.238.90:<0.21943.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2762.1>,#Ref<16550.0.1.95342>}]} [rebalance:info,2014-08-19T16:50:08.629,ns_1@10.242.238.90:<0.21943.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 962 [rebalance:debug,2014-08-19T16:50:08.630,ns_1@10.242.238.90:<0.21943.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2762.1>,#Ref<16550.0.1.95342>}] [ns_server:debug,2014-08-19T16:50:08.630,ns_1@10.242.238.90:<0.21943.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:08.647,ns_1@10.242.238.90:<0.21945.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 962 [ns_server:info,2014-08-19T16:50:08.652,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 708 state to replica 
[ns_server:info,2014-08-19T16:50:08.661,ns_1@10.242.238.90:<0.21948.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 708 to state replica [ns_server:debug,2014-08-19T16:50:08.707,ns_1@10.242.238.90:<0.21948.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_708_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:08.708,ns_1@10.242.238.90:<0.21948.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[708]}, {checkpoints,[{708,0}]}, {name,<<"replication_building_708_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[708]}, {takeover,false}, {suffix,"building_708_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",708,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:08.709,ns_1@10.242.238.90:<0.21948.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21963.0> [rebalance:debug,2014-08-19T16:50:08.709,ns_1@10.242.238.90:<0.21948.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:08.710,ns_1@10.242.238.90:<0.21948.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2782.1>,#Ref<16550.0.1.95482>}]} [rebalance:info,2014-08-19T16:50:08.710,ns_1@10.242.238.90:<0.21948.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 708 [rebalance:debug,2014-08-19T16:50:08.710,ns_1@10.242.238.90:<0.21948.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2782.1>,#Ref<16550.0.1.95482>}] [ns_server:debug,2014-08-19T16:50:08.711,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21964.0> (ok) [ns_server:debug,2014-08-19T16:50:08.711,ns_1@10.242.238.90:<0.21948.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:08.713,ns_1@10.242.238.90:<0.21965.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 708 [ns_server:debug,2014-08-19T16:50:08.722,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 715. Nacking mccouch update. [views:debug,2014-08-19T16:50:08.722,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/715. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:08.722,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",715,pending,0} [ns_server:debug,2014-08-19T16:50:08.723,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,718,984,737,1016,971,756,724,1003,990,743,1022,977,762,730,1009,996, 749,717,999,983,752,736,720,1015,986,755,739,723,1018,1002,989,973,758,742, 726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732,1011,998,982,767, 751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725,1020,1004, 991,975,760,744,728,1023,1007,994,978,763,747,731,715,1010,981,766,734,1013, 753,721,1000,987,740,1019,974,759,727,1006,993,746,980,765,733,1012] [ns_server:info,2014-08-19T16:50:08.781,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 961 state to replica [ns_server:info,2014-08-19T16:50:08.785,ns_1@10.242.238.90:<0.21968.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 961 to state replica [views:debug,2014-08-19T16:50:08.789,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/715. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:08.789,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",715,pending,0} [ns_server:debug,2014-08-19T16:50:08.816,ns_1@10.242.238.90:<0.21968.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_961_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:08.818,ns_1@10.242.238.90:<0.21968.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[961]}, {checkpoints,[{961,0}]}, {name,<<"replication_building_961_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[961]}, {takeover,false}, {suffix,"building_961_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",961,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:08.818,ns_1@10.242.238.90:<0.21968.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21969.0> [rebalance:debug,2014-08-19T16:50:08.818,ns_1@10.242.238.90:<0.21968.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:08.819,ns_1@10.242.238.90:<0.21968.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2839.1>,#Ref<16550.0.1.95769>}]} [rebalance:info,2014-08-19T16:50:08.819,ns_1@10.242.238.90:<0.21968.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 961 [rebalance:debug,2014-08-19T16:50:08.820,ns_1@10.242.238.90:<0.21968.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2839.1>,#Ref<16550.0.1.95769>}] [ns_server:debug,2014-08-19T16:50:08.821,ns_1@10.242.238.90:<0.21968.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:08.841,ns_1@10.242.238.90:<0.21970.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 961 [ns_server:info,2014-08-19T16:50:08.846,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 707 state to replica 
[ns_server:info,2014-08-19T16:50:08.852,ns_1@10.242.238.90:<0.21973.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 707 to state replica [ns_server:debug,2014-08-19T16:50:08.899,ns_1@10.242.238.90:<0.21973.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_707_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:08.900,ns_1@10.242.238.90:<0.21973.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[707]}, {checkpoints,[{707,0}]}, {name,<<"replication_building_707_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[707]}, {takeover,false}, {suffix,"building_707_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",707,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:08.901,ns_1@10.242.238.90:<0.21973.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21988.0> [rebalance:debug,2014-08-19T16:50:08.901,ns_1@10.242.238.90:<0.21973.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:08.901,ns_1@10.242.238.90:<0.21973.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2873.1>,#Ref<16550.0.1.95936>}]} [rebalance:info,2014-08-19T16:50:08.902,ns_1@10.242.238.90:<0.21973.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 707 [rebalance:debug,2014-08-19T16:50:08.902,ns_1@10.242.238.90:<0.21973.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2873.1>,#Ref<16550.0.1.95936>}] [ns_server:debug,2014-08-19T16:50:08.902,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21989.0> (ok) [ns_server:debug,2014-08-19T16:50:08.903,ns_1@10.242.238.90:<0.21973.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:08.904,ns_1@10.242.238.90:<0.21990.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 707 [ns_server:debug,2014-08-19T16:50:08.940,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 967. Nacking mccouch update. [views:debug,2014-08-19T16:50:08.940,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/967. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:08.940,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",967,replica,0} [ns_server:debug,2014-08-19T16:50:08.940,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,718,984,737,1016,971,756,724,1003,990,743,1022,977,762,730,1009,996, 749,717,999,983,967,752,736,720,1015,986,755,739,723,1018,1002,989,973,758, 742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732,1011,998,982, 767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725,1020, 1004,991,975,760,744,728,1023,1007,994,978,763,747,731,715,1010,981,766,734, 1013,753,721,1000,987,740,1019,974,759,727,1006,993,746,980,765,733,1012] [ns_server:info,2014-08-19T16:50:08.977,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 960 state to replica [ns_server:info,2014-08-19T16:50:08.981,ns_1@10.242.238.90:<0.21993.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 960 to state replica [views:debug,2014-08-19T16:50:08.991,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/967. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:08.991,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",967,replica,0} [ns_server:debug,2014-08-19T16:50:09.015,ns_1@10.242.238.90:<0.21993.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_960_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:09.016,ns_1@10.242.238.90:<0.21993.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[960]}, {checkpoints,[{960,0}]}, {name,<<"replication_building_960_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[960]}, {takeover,false}, {suffix,"building_960_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",960,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:09.017,ns_1@10.242.238.90:<0.21993.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21994.0> [rebalance:debug,2014-08-19T16:50:09.017,ns_1@10.242.238.90:<0.21993.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:09.018,ns_1@10.242.238.90:<0.21993.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2930.1>,#Ref<16550.0.1.96202>}]} [rebalance:info,2014-08-19T16:50:09.018,ns_1@10.242.238.90:<0.21993.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 960 [rebalance:debug,2014-08-19T16:50:09.018,ns_1@10.242.238.90:<0.21993.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2930.1>,#Ref<16550.0.1.96202>}] [ns_server:debug,2014-08-19T16:50:09.019,ns_1@10.242.238.90:<0.21993.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:09.035,ns_1@10.242.238.90:<0.21995.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 960 [ns_server:info,2014-08-19T16:50:09.040,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 706 state to replica 
[ns_server:info,2014-08-19T16:50:09.046,ns_1@10.242.238.90:<0.21998.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 706 to state replica [ns_server:debug,2014-08-19T16:50:09.095,ns_1@10.242.238.90:<0.21998.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_706_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:09.097,ns_1@10.242.238.90:<0.21998.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[706]}, {checkpoints,[{706,0}]}, {name,<<"replication_building_706_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[706]}, {takeover,false}, {suffix,"building_706_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",706,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:09.098,ns_1@10.242.238.90:<0.21998.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22013.0> [rebalance:debug,2014-08-19T16:50:09.098,ns_1@10.242.238.90:<0.21998.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:09.098,ns_1@10.242.238.90:<0.21998.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2950.1>,#Ref<16550.0.1.96339>}]} [rebalance:info,2014-08-19T16:50:09.098,ns_1@10.242.238.90:<0.21998.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 706 [rebalance:debug,2014-08-19T16:50:09.099,ns_1@10.242.238.90:<0.21998.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2950.1>,#Ref<16550.0.1.96339>}] [ns_server:debug,2014-08-19T16:50:09.099,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22014.0> (ok) [ns_server:debug,2014-08-19T16:50:09.100,ns_1@10.242.238.90:<0.21998.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:09.101,ns_1@10.242.238.90:<0.22015.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 706 [ns_server:debug,2014-08-19T16:50:09.141,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 965. Nacking mccouch update. [views:debug,2014-08-19T16:50:09.141,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/965. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:09.141,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",965,replica,0} [ns_server:debug,2014-08-19T16:50:09.141,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,1022,977,762,730,1009, 996,749,717,999,983,967,752,736,720,1015,986,755,739,723,1018,1002,989,973, 758,742,726,1021,1005,992,976,761,745,729,1008,995,979,764,748,732,1011,998, 982,767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725, 1020,1004,991,975,760,744,728,1023,1007,994,978,763,747,731,715,1010,981,766, 734,1013,753,721,1000,987,740,1019,974,759,727,1006,993,746,980,765,733,1012] [ns_server:info,2014-08-19T16:50:09.171,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 705 state to replica [ns_server:info,2014-08-19T16:50:09.178,ns_1@10.242.238.90:<0.22018.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 705 to state replica [views:debug,2014-08-19T16:50:09.208,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/965. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:09.208,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",965,replica,0} [ns_server:debug,2014-08-19T16:50:09.224,ns_1@10.242.238.90:<0.22018.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_705_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:09.225,ns_1@10.242.238.90:<0.22018.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[705]}, {checkpoints,[{705,0}]}, {name,<<"replication_building_705_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[705]}, {takeover,false}, {suffix,"building_705_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",705,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:09.226,ns_1@10.242.238.90:<0.22018.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22019.0> [rebalance:debug,2014-08-19T16:50:09.226,ns_1@10.242.238.90:<0.22018.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:09.227,ns_1@10.242.238.90:<0.22018.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.3011.1>,#Ref<16550.0.1.96601>}]} [rebalance:info,2014-08-19T16:50:09.227,ns_1@10.242.238.90:<0.22018.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 705 [rebalance:debug,2014-08-19T16:50:09.227,ns_1@10.242.238.90:<0.22018.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.3011.1>,#Ref<16550.0.1.96601>}] [ns_server:debug,2014-08-19T16:50:09.228,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22020.0> (ok) [ns_server:debug,2014-08-19T16:50:09.228,ns_1@10.242.238.90:<0.22018.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:09.229,ns_1@10.242.238.90:<0.22021.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 705 
[ns_server:info,2014-08-19T16:50:09.297,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 704 state to replica [ns_server:info,2014-08-19T16:50:09.303,ns_1@10.242.238.90:<0.22038.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 704 to state replica [ns_server:debug,2014-08-19T16:50:09.349,ns_1@10.242.238.90:<0.22038.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_704_'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:50:09.350,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 963. Nacking mccouch update. [views:debug,2014-08-19T16:50:09.350,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/963. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:09.350,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",963,replica,0} [ns_server:debug,2014-08-19T16:50:09.350,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,1022,977,762,730,1009, 996,749,717,999,983,967,752,736,720,1015,986,755,739,723,1018,1002,989,973, 758,742,726,1021,1005,992,976,761,745,729,1008,995,979,963,764,748,732,1011, 998,982,767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741, 725,1020,1004,991,975,760,744,728,1023,1007,994,978,763,747,731,715,1010,981, 766,734,1013,753,721,1000,987,740,1019,974,759,727,1006,993,746,980,765,733, 1012] [rebalance:info,2014-08-19T16:50:09.350,ns_1@10.242.238.90:<0.22038.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[704]}, {checkpoints,[{704,0}]}, {name,<<"replication_building_704_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[704]}, {takeover,false}, {suffix,"building_704_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",704,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:09.351,ns_1@10.242.238.90:<0.22038.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22039.0> [rebalance:debug,2014-08-19T16:50:09.351,ns_1@10.242.238.90:<0.22038.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:09.352,ns_1@10.242.238.90:<0.22038.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.3053.1>,#Ref<16550.0.1.96812>}]} [rebalance:info,2014-08-19T16:50:09.352,ns_1@10.242.238.90:<0.22038.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 704 [rebalance:debug,2014-08-19T16:50:09.352,ns_1@10.242.238.90:<0.22038.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.3053.1>,#Ref<16550.0.1.96812>}] [ns_server:debug,2014-08-19T16:50:09.353,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22040.0> (ok) [ns_server:debug,2014-08-19T16:50:09.353,ns_1@10.242.238.90:<0.22038.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:09.355,ns_1@10.242.238.90:<0.22041.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 704 
[views:debug,2014-08-19T16:50:09.413,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/963. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:09.413,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",963,replica,0} [ns_server:debug,2014-08-19T16:50:09.481,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 961. Nacking mccouch update. [views:debug,2014-08-19T16:50:09.481,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/961. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:09.481,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",961,replica,0} [ns_server:debug,2014-08-19T16:50:09.482,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,1022,977,762,730,1009, 996,749,717,999,983,967,752,736,720,1015,986,755,739,723,1018,1002,989,973, 758,742,726,1021,1005,992,976,761,745,729,1008,995,979,963,764,748,732,1011, 998,982,767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741, 725,1020,1004,991,975,760,744,728,1023,1007,994,978,763,747,731,715,1010,981, 766,734,1013,753,721,1000,987,740,1019,974,759,727,1006,993,961,746,980,765, 733,1012] [views:debug,2014-08-19T16:50:09.515,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/961. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:09.515,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",961,replica,0} [ns_server:debug,2014-08-19T16:50:09.674,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 713. Nacking mccouch update. [views:debug,2014-08-19T16:50:09.674,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/713. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:09.674,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",713,pending,0} [ns_server:debug,2014-08-19T16:50:09.674,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,1022,977,762,730,1009, 996,749,717,983,736,1015,986,755,739,723,1018,1002,989,973,758,742,726,1021, 1005,992,976,761,745,729,713,1008,995,979,963,764,748,732,1011,998,982,767, 751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725,1020,1004, 991,975,760,744,728,1023,1007,994,978,763,747,731,715,1010,981,766,734,1013, 753,721,1000,987,740,1019,974,759,727,1006,993,961,746,980,765,733,1012,999, 967,752,720] [views:debug,2014-08-19T16:50:09.741,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/713. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:09.741,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",713,pending,0} [ns_server:debug,2014-08-19T16:50:09.874,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 711. Nacking mccouch update. [views:debug,2014-08-19T16:50:09.875,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/711. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:09.875,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",711,pending,0} [ns_server:debug,2014-08-19T16:50:09.875,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,749,717,983,736,1015,986,755,739,723,1018,1002,989,973,758,742,726, 1021,1005,992,976,761,745,729,713,1008,995,979,963,764,748,732,1011,998,982, 767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725,1020, 1004,991,975,760,744,728,1023,1007,994,978,763,747,731,715,1010,981,766,734, 1013,753,721,1000,987,740,1019,974,759,727,1006,993,961,746,980,765,733,1012, 999,967,752,720] [views:debug,2014-08-19T16:50:09.942,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/711. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:09.942,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",711,pending,0} [ns_server:debug,2014-08-19T16:50:10.100,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 709. Nacking mccouch update. [views:debug,2014-08-19T16:50:10.100,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/709. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:10.100,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",709,pending,0} [ns_server:debug,2014-08-19T16:50:10.101,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,749,717,983,736,1015,986,755,739,723,1018,1002,989,973,758,742,726, 1021,1005,992,976,761,745,729,713,1008,995,979,963,764,748,732,1011,998,982, 767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725,709, 1020,1004,991,975,760,744,728,1023,1007,994,978,763,747,731,715,1010,981,766, 734,1013,753,721,1000,987,740,1019,974,759,727,1006,993,961,746,980,765,733, 1012,999,967,752,720] [views:debug,2014-08-19T16:50:10.167,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/709. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:10.168,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",709,pending,0} [ns_server:debug,2014-08-19T16:50:10.242,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 970. Nacking mccouch update. 
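Each set_vbucket event above makes capi_set_view_manager dump the full "Usable vbuckets" set, so the useful signal is the delta between consecutive snapshots; for example, 711 joins the set in the dump that follows the default/711 pending event above. A small sketch, assuming the literal "Usable vbuckets:" message format seen here, for diffing consecutive dumps (helper name is illustrative):

import re

# Matches the capi_set_view_manager dumps above, e.g. "Usable vbuckets:\n[997,750,718,...]"
SNAPSHOT = re.compile(r"Usable vbuckets:\s*\[([\d,\s]+)\]")

def usable_vbucket_diffs(text):
    """Return (added, removed) vbucket sets between consecutive 'Usable vbuckets' dumps."""
    snaps = [set(int(x) for x in m.group(1).split(","))
             for m in SNAPSHOT.finditer(text)]
    return [(cur - prev, prev - cur) for prev, cur in zip(snaps, snaps[1:])]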
[views:debug,2014-08-19T16:50:10.242,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/970. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:10.242,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",970,replica,0} [ns_server:debug,2014-08-19T16:50:10.242,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,749,717,983,736,1015,986,970,755,739,723,1018,1002,989,973,758,742, 726,1021,1005,992,976,761,745,729,713,1008,995,979,963,764,748,732,1011,998, 982,767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725, 709,1020,1004,991,975,760,744,728,1023,1007,994,978,763,747,731,715,1010,981, 766,734,1013,753,721,1000,987,740,1019,974,759,727,1006,993,961,746,980,765, 733,1012,999,967,752,720] [views:debug,2014-08-19T16:50:10.276,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/970. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:10.276,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",970,replica,0} [ns_server:debug,2014-08-19T16:50:10.360,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 968. Nacking mccouch update. [views:debug,2014-08-19T16:50:10.360,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/968. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:10.360,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",968,replica,0} [ns_server:debug,2014-08-19T16:50:10.360,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,749,717,983,736,1015,986,970,755,739,723,1018,1002,989,973,758,742, 726,1021,1005,992,976,761,745,729,713,1008,995,979,963,764,748,732,1011,998, 982,767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725, 709,1020,1004,991,975,760,744,728,1023,1007,994,978,763,747,731,715,1010,981, 766,734,1013,968,753,721,1000,987,740,1019,974,759,727,1006,993,961,746,980, 765,733,1012,999,967,752,720] [views:debug,2014-08-19T16:50:10.393,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/968. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:10.393,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",968,replica,0} [ns_server:debug,2014-08-19T16:50:10.477,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 966. Nacking mccouch update. [views:debug,2014-08-19T16:50:10.477,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/966. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:10.477,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",966,replica,0} [ns_server:debug,2014-08-19T16:50:10.477,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,749,717,983,736,1015,970,755,723,1002,989,973,758,742,726,1021,1005, 992,976,761,745,729,713,1008,995,979,963,764,748,732,1011,998,982,966,767, 751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725,709,1020, 1004,991,975,760,744,728,1023,1007,994,978,763,747,731,715,1010,981,766,734, 1013,968,753,721,1000,987,740,1019,974,759,727,1006,993,961,746,980,765,733, 1012,999,967,752,720,986,739,1018] [views:debug,2014-08-19T16:50:10.511,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/966. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:10.511,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",966,replica,0} [ns_server:debug,2014-08-19T16:50:10.578,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 964. Nacking mccouch update. [views:debug,2014-08-19T16:50:10.578,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/964. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:10.578,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",964,replica,0} [ns_server:debug,2014-08-19T16:50:10.578,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,964,749,717,983,736,1015,970,755,723,1002,989,973,758,742,726,1021, 1005,992,976,761,745,729,713,1008,995,979,963,764,748,732,1011,998,982,966, 767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725,709, 1020,1004,991,975,760,744,728,1023,1007,994,978,763,747,731,715,1010,981,766, 734,1013,968,753,721,1000,987,740,1019,974,759,727,1006,993,961,746,980,765, 733,1012,999,967,752,720,986,739,1018] [views:debug,2014-08-19T16:50:10.612,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/964. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:10.612,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",964,replica,0} [ns_server:debug,2014-08-19T16:50:10.692,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 962. Nacking mccouch update. [views:debug,2014-08-19T16:50:10.693,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/962. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:10.693,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",962,replica,0} [ns_server:debug,2014-08-19T16:50:10.693,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,964,749,717,983,736,1015,970,755,723,1002,989,973,758,742,726,1021, 1005,992,976,761,745,729,713,1008,995,979,963,764,748,732,1011,998,982,966, 767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725,709, 1020,1004,991,975,760,744,728,1023,1007,994,978,962,763,747,731,715,1010,981, 766,734,1013,968,753,721,1000,987,740,1019,974,759,727,1006,993,961,746,980, 765,733,1012,999,967,752,720,986,739,1018] [views:debug,2014-08-19T16:50:10.768,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/962. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:10.768,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",962,replica,0} [ns_server:debug,2014-08-19T16:50:10.935,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 960. Nacking mccouch update. [views:debug,2014-08-19T16:50:10.935,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/960. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:10.935,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",960,replica,0} [ns_server:debug,2014-08-19T16:50:10.936,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,964,749,717,983,736,1015,970,755,723,1002,989,973,758,742,726,1021, 1005,992,976,960,761,745,729,713,1008,995,979,963,764,748,732,1011,998,982, 966,767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725, 709,1020,1004,991,975,760,744,728,1023,1007,994,978,962,763,747,731,715,1010, 981,766,734,1013,968,753,721,1000,987,740,1019,974,759,727,1006,993,961,746, 980,765,733,1012,999,967,752,720,986,739,1018] [views:debug,2014-08-19T16:50:11.011,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/960. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:11.011,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",960,replica,0} [ns_server:debug,2014-08-19T16:50:11.177,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 716. Nacking mccouch update. [views:debug,2014-08-19T16:50:11.178,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/716. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:11.178,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",716,pending,0} [ns_server:debug,2014-08-19T16:50:11.178,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,964,749,717,983,736,1015,970,755,723,1002,989,973,758,742,726,1021, 1005,992,976,960,761,745,729,713,1008,995,979,963,764,748,732,716,1011,998, 982,966,767,751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741, 725,709,1020,1004,991,975,760,744,728,1023,1007,994,978,962,763,747,731,715, 1010,981,766,734,1013,968,753,721,1000,987,740,1019,974,759,727,1006,993,961, 746,980,765,733,1012,999,967,752,720,986,739,1018] [views:debug,2014-08-19T16:50:11.245,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/716. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:11.245,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",716,pending,0} [ns_server:debug,2014-08-19T16:50:11.378,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 714. Nacking mccouch update. [views:debug,2014-08-19T16:50:11.378,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/714. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:11.378,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",714,pending,0} [ns_server:debug,2014-08-19T16:50:11.379,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,964,749,717,983,736,1015,970,755,723,1002,989,742,1021,992,976,960, 761,745,729,713,1008,995,979,963,764,748,732,716,1011,998,982,966,767,751, 735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725,709,1020,1004, 991,975,760,744,728,1023,1007,994,978,962,763,747,731,715,1010,981,766,734, 1013,968,753,721,1000,987,740,1019,974,759,727,1006,993,961,746,714,980,765, 733,1012,999,967,752,720,986,739,1018,973,758,726,1005] [views:debug,2014-08-19T16:50:11.496,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/714. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:11.496,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",714,pending,0} [ns_server:debug,2014-08-19T16:50:11.621,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 712. Nacking mccouch update. [views:debug,2014-08-19T16:50:11.621,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/712. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:11.621,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",712,pending,0} [ns_server:debug,2014-08-19T16:50:11.621,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,964,749,717,983,736,1015,970,755,723,1002,989,742,1021,992,976,960, 761,745,729,713,1008,995,979,963,764,748,732,716,1011,998,982,966,767,751, 735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725,709,1020,1004, 991,975,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010,981,766, 734,1013,968,753,721,1000,987,740,1019,974,759,727,1006,993,961,746,714,980, 765,733,1012,999,967,752,720,986,739,1018,973,758,726,1005] [views:debug,2014-08-19T16:50:11.696,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/712. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:11.697,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",712,pending,0} [ns_server:debug,2014-08-19T16:50:11.764,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 710. Nacking mccouch update. [views:debug,2014-08-19T16:50:11.764,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/710. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:11.764,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",710,pending,0} [ns_server:debug,2014-08-19T16:50:11.764,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,964,749,717,983,736,1015,970,755,723,1002,989,742,710,1021,992,976, 960,761,745,729,713,1008,995,979,963,764,748,732,716,1011,998,982,966,767, 751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725,709,1020, 1004,991,975,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010,981, 766,734,1013,968,753,721,1000,987,740,1019,974,759,727,1006,993,961,746,714, 980,765,733,1012,999,967,752,720,986,739,1018,973,758,726,1005] [views:debug,2014-08-19T16:50:11.798,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/710. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:11.798,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",710,pending,0} [ns_server:debug,2014-08-19T16:50:11.865,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 708. Nacking mccouch update. [views:debug,2014-08-19T16:50:11.865,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/708. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:11.865,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",708,pending,0} [ns_server:debug,2014-08-19T16:50:11.865,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,964,749,717,983,736,1015,970,755,723,1002,989,742,710,1021,992,976, 960,761,745,729,713,1008,995,979,963,764,748,732,716,1011,998,982,966,767, 751,735,719,1014,985,969,754,738,722,1017,1001,988,972,757,741,725,709,1020, 1004,991,975,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010,981, 766,734,1013,968,753,721,1000,987,740,708,1019,974,759,727,1006,993,961,746, 714,980,765,733,1012,999,967,752,720,986,739,1018,973,758,726,1005] [views:debug,2014-08-19T16:50:11.899,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/708. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:11.899,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",708,pending,0} [ns_server:debug,2014-08-19T16:50:11.965,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 706. Nacking mccouch update. [views:debug,2014-08-19T16:50:11.966,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/706. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:11.966,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",706,pending,0} [ns_server:debug,2014-08-19T16:50:11.966,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,964,749,717,983,736,1015,970,755,723,1002,989,742,710,1021,992,976, 960,761,745,729,713,1008,995,979,963,764,748,732,716,1011,998,982,966,767, 751,735,719,1014,985,969,754,738,722,706,1017,1001,988,972,757,741,725,709, 1020,1004,991,975,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010, 981,766,734,1013,968,753,721,1000,987,740,708,1019,974,759,727,1006,993,961, 746,714,980,765,733,1012,999,967,752,720,986,739,1018,973,758,726,1005] [views:debug,2014-08-19T16:50:12.000,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/706. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:12.000,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",706,pending,0} [ns_server:debug,2014-08-19T16:50:12.066,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 704. Nacking mccouch update. [views:debug,2014-08-19T16:50:12.066,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/704. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:12.066,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",704,pending,0} [ns_server:debug,2014-08-19T16:50:12.067,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,742,710,1021,976, 761,729,1008,995,979,963,764,748,732,716,1011,998,982,966,767,751,735,719, 1014,985,969,754,738,722,706,1017,1001,988,972,757,741,725,709,1020,1004,991, 975,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010,981,766,734, 1013,968,753,721,1000,987,740,708,1019,974,759,727,1006,993,961,746,714,980, 765,733,1012,999,967,752,720,986,739,1018,973,758,726,1005,992,960,745,713] [views:debug,2014-08-19T16:50:12.125,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/704. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:12.125,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",704,pending,0} [ns_server:debug,2014-08-19T16:50:12.238,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 707. Nacking mccouch update. [views:debug,2014-08-19T16:50:12.238,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/707. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:12.238,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",707,pending,0} [ns_server:debug,2014-08-19T16:50:12.239,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,1016,971,756,724,1003,990,743,711,1022,977,762,730, 1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,742,710,1021,976, 761,729,1008,995,979,963,764,748,732,716,1011,998,982,966,767,751,735,719, 1014,985,969,754,738,722,706,1017,1001,988,972,757,741,725,709,1020,1004,991, 975,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010,981,766,734, 1013,968,753,721,1000,987,740,708,1019,974,759,727,1006,993,961,746,714,980, 765,733,1012,999,967,752,720,986,739,707,1018,973,758,726,1005,992,960,745, 713] [views:debug,2014-08-19T16:50:12.314,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/707. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:12.314,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",707,pending,0} [ns_server:debug,2014-08-19T16:50:12.464,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 705. Nacking mccouch update. [views:debug,2014-08-19T16:50:12.464,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/705. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:12.464,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",705,pending,0} [ns_server:debug,2014-08-19T16:50:12.464,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,705,1016,971,756,724,1003,990,743,711,1022,977,762, 730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,742,710,1021, 976,761,729,1008,995,979,963,764,748,732,716,1011,998,982,966,767,751,735, 719,1014,985,969,754,738,722,706,1017,1001,988,972,757,741,725,709,1020,1004, 991,975,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010,981,766, 734,1013,968,753,721,1000,987,740,708,1019,974,759,727,1006,993,961,746,714, 980,765,733,1012,999,967,752,720,986,739,707,1018,973,758,726,1005,992,960, 745,713] [views:debug,2014-08-19T16:50:12.533,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/705. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:12.533,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",705,pending,0} [rebalance:debug,2014-08-19T16:50:12.534,ns_1@10.242.238.90:<0.21905.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:12.535,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21905.0> (ok) [rebalance:debug,2014-08-19T16:50:12.610,ns_1@10.242.238.90:<0.21841.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:12.610,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21841.0> (ok) [rebalance:debug,2014-08-19T16:50:12.610,ns_1@10.242.238.90:<0.22021.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:12.610,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22021.0> (ok) [rebalance:debug,2014-08-19T16:50:12.733,ns_1@10.242.238.90:<0.21790.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:12.733,ns_1@10.242.238.90:<0.21990.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:12.733,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21790.0> (ok) [ns_server:debug,2014-08-19T16:50:12.733,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21990.0> (ok) [rebalance:debug,2014-08-19T16:50:12.867,ns_1@10.242.238.90:<0.21734.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:12.867,ns_1@10.242.238.90:<0.21940.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:12.867,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21734.0> (ok) [ns_server:debug,2014-08-19T16:50:12.867,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21940.0> (ok) [rebalance:debug,2014-08-19T16:50:13.001,ns_1@10.242.238.90:<0.21684.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:13.001,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21684.0> (ok) 
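Note on the entries above: each Signaled mc_couch_event: {set_vbucket,"default",VB,pending,0} record marks a vbucket being switched to the pending state on this node, and the "Usable vbuckets" list grows by one vbucket per event as the rebalance proceeds. Below is a minimal sketch for tallying those transitions from a saved copy of this log; the file name ns_server.debug.log is a placeholder, and the regex is keyed to the exact message text shown here, so this is an illustration rather than shipped tooling.

import re
from collections import defaultdict

# Matches the payload logged by mc_connection:do_notify_vbucket_update,
# e.g. {set_vbucket,"default",712,pending,0}
SET_VB = re.compile(
    r'\{set_vbucket,"(?P<bucket>[^"]+)",(?P<vb>\d+),(?P<state>\w+),(?P<ckpt>\d+)\}')

def latest_vbucket_states(path):
    """Return {bucket: {vbucket_id: (state, checkpoint)}} as last seen in the log."""
    states = defaultdict(dict)
    with open(path, errors="replace") as fh:
        for line in fh:
            for m in SET_VB.finditer(line):
                states[m.group("bucket")][int(m.group("vb"))] = (
                    m.group("state"), int(m.group("ckpt")))
    return states

if __name__ == "__main__":
    for bucket, vbs in latest_vbucket_states("ns_server.debug.log").items():
        pending = sorted(vb for vb, (st, _) in vbs.items() if st == "pending")
        print(bucket, "vbuckets still pending:", pending)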
[rebalance:debug,2014-08-19T16:50:13.001,ns_1@10.242.238.90:<0.21866.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:13.001,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21866.0> (ok) [rebalance:debug,2014-08-19T16:50:13.126,ns_1@10.242.238.90:<0.21633.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:13.126,ns_1@10.242.238.90:<0.21815.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:13.126,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21633.0> (ok) [ns_server:debug,2014-08-19T16:50:13.126,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21815.0> (ok) [rebalance:debug,2014-08-19T16:50:13.242,ns_1@10.242.238.90:<0.21759.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:13.242,ns_1@10.242.238.90:<0.21583.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:13.243,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21759.0> (ok) [ns_server:debug,2014-08-19T16:50:13.243,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21583.0> (ok) [rebalance:debug,2014-08-19T16:50:13.334,ns_1@10.242.238.90:<0.21519.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:13.335,ns_1@10.242.238.90:<0.21709.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:13.335,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21519.0> (ok) [ns_server:debug,2014-08-19T16:50:13.335,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21709.0> (ok) [rebalance:debug,2014-08-19T16:50:13.468,ns_1@10.242.238.90:<0.21945.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:13.468,ns_1@10.242.238.90:<0.21659.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:13.468,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21945.0> (ok) [ns_server:debug,2014-08-19T16:50:13.469,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21659.0> (ok) [rebalance:debug,2014-08-19T16:50:13.536,ns_1@10.242.238.90:<0.21885.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:13.536,ns_1@10.242.238.90:<0.21608.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:13.536,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21885.0> (ok) [ns_server:debug,2014-08-19T16:50:13.536,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21608.0> (ok) [rebalance:debug,2014-08-19T16:50:13.628,ns_1@10.242.238.90:<0.21821.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:13.628,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21821.0> (ok) [rebalance:debug,2014-08-19T16:50:13.628,ns_1@10.242.238.90:<0.21558.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:50:13.628,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21558.0> (ok) [rebalance:debug,2014-08-19T16:50:13.737,ns_1@10.242.238.90:<0.21770.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:13.737,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21770.0> (ok) [rebalance:debug,2014-08-19T16:50:13.737,ns_1@10.242.238.90:<0.21970.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:13.737,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21970.0> (ok) [rebalance:debug,2014-08-19T16:50:13.829,ns_1@10.242.238.90:<0.21728.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:13.829,ns_1@10.242.238.90:<0.21920.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:13.829,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21728.0> (ok) [ns_server:debug,2014-08-19T16:50:13.829,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21920.0> (ok) [rebalance:debug,2014-08-19T16:50:13.921,ns_1@10.242.238.90:<0.21860.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:13.921,ns_1@10.242.238.90:<0.21678.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:13.921,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21860.0> (ok) [ns_server:debug,2014-08-19T16:50:13.921,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21678.0> (ok) [rebalance:debug,2014-08-19T16:50:14.035,ns_1@10.242.238.90:<0.21809.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:14.035,ns_1@10.242.238.90:<0.21613.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:14.035,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21809.0> (ok) [ns_server:debug,2014-08-19T16:50:14.035,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21613.0> (ok) [rebalance:debug,2014-08-19T16:50:14.160,ns_1@10.242.238.90:<0.21563.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:14.161,ns_1@10.242.238.90:<0.21753.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:14.161,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21563.0> (ok) [ns_server:debug,2014-08-19T16:50:14.161,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21753.0> (ok) [rebalance:debug,2014-08-19T16:50:14.261,ns_1@10.242.238.90:<0.21513.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:14.261,ns_1@10.242.238.90:<0.21703.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:14.261,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21513.0> (ok) [ns_server:debug,2014-08-19T16:50:14.261,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21703.0> (ok) 
[rebalance:debug,2014-08-19T16:50:14.378,ns_1@10.242.238.90:<0.21653.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:14.378,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21653.0> (ok) [rebalance:debug,2014-08-19T16:50:14.428,ns_1@10.242.238.90:<0.21588.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:14.428,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21588.0> (ok) [rebalance:debug,2014-08-19T16:50:14.478,ns_1@10.242.238.90:<0.21538.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:14.478,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21538.0> (ok) [rebalance:debug,2014-08-19T16:50:14.529,ns_1@10.242.238.90:<0.22041.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:14.529,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22041.0> (ok) [rebalance:debug,2014-08-19T16:50:14.579,ns_1@10.242.238.90:<0.22015.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:14.579,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22015.0> (ok) [rebalance:debug,2014-08-19T16:50:14.646,ns_1@10.242.238.90:<0.21965.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:14.646,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21965.0> (ok) [rebalance:debug,2014-08-19T16:50:14.721,ns_1@10.242.238.90:<0.21995.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:14.721,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.21995.0> (ok) [rebalance:debug,2014-08-19T16:50:16.210,ns_1@10.242.238.90:<0.22348.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 718 [rebalance:debug,2014-08-19T16:50:16.211,ns_1@10.242.238.90:<0.22348.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.212,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22348.0> (ok) [rebalance:debug,2014-08-19T16:50:16.331,ns_1@10.242.238.90:<0.22351.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 720 [rebalance:debug,2014-08-19T16:50:16.331,ns_1@10.242.238.90:<0.22354.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 719 [rebalance:debug,2014-08-19T16:50:16.332,ns_1@10.242.238.90:<0.22351.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.333,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22351.0> (ok) [rebalance:debug,2014-08-19T16:50:16.333,ns_1@10.242.238.90:<0.22354.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.333,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22354.0> (ok) [rebalance:debug,2014-08-19T16:50:16.415,ns_1@10.242.238.90:<0.22357.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 722 
[rebalance:debug,2014-08-19T16:50:16.415,ns_1@10.242.238.90:<0.22360.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 721 [rebalance:debug,2014-08-19T16:50:16.416,ns_1@10.242.238.90:<0.22357.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.416,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22357.0> (ok) [rebalance:debug,2014-08-19T16:50:16.416,ns_1@10.242.238.90:<0.22360.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.417,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22360.0> (ok) [rebalance:debug,2014-08-19T16:50:16.515,ns_1@10.242.238.90:<0.22363.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 724 [rebalance:debug,2014-08-19T16:50:16.515,ns_1@10.242.238.90:<0.22366.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 723 [rebalance:debug,2014-08-19T16:50:16.517,ns_1@10.242.238.90:<0.22363.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.517,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22363.0> (ok) [rebalance:debug,2014-08-19T16:50:16.517,ns_1@10.242.238.90:<0.22366.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.517,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22366.0> (ok) [rebalance:debug,2014-08-19T16:50:16.599,ns_1@10.242.238.90:<0.22369.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 725 [rebalance:debug,2014-08-19T16:50:16.599,ns_1@10.242.238.90:<0.22372.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 972 [rebalance:debug,2014-08-19T16:50:16.600,ns_1@10.242.238.90:<0.22372.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.600,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22372.0> (ok) [rebalance:debug,2014-08-19T16:50:16.600,ns_1@10.242.238.90:<0.22369.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.601,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22369.0> (ok) [rebalance:debug,2014-08-19T16:50:16.666,ns_1@10.242.238.90:<0.22375.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 974 [rebalance:debug,2014-08-19T16:50:16.666,ns_1@10.242.238.90:<0.22378.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 973 [rebalance:debug,2014-08-19T16:50:16.667,ns_1@10.242.238.90:<0.22375.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.667,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22375.0> (ok) [rebalance:debug,2014-08-19T16:50:16.668,ns_1@10.242.238.90:<0.22378.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.668,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22378.0> (ok) [rebalance:debug,2014-08-19T16:50:16.750,ns_1@10.242.238.90:<0.22381.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 976 
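The interleaved pairs above ("Going to wait for persistence of checkpoint 1 in vbucket N" followed by "Done" from the same <pid>) are the per-vbucket persistence waits issued through janitor_agent during the rebalance. The sketch below pairs them up by worker pid and reports how long each wait took; the log path is again a placeholder and the pattern assumes the record layout visible in this excerpt.

import re
from datetime import datetime

# One alternation covers both record kinds so matches come back in log order.
EVENT = re.compile(
    r",(?P<ts>\d{4}-\d{2}-\d{2}T[\d:.]+),[^,\]]*?(?P<pid><\d+\.\d+\.\d+>)"
    r":janitor_agent:handle_call:\d+\]"
    r"(?:Going to wait for persistence of checkpoint (?P<ckpt>\d+) in vbucket (?P<vb>\d+)"
    r"|(?P<done>Done))")

def parse_ts(s):
    return datetime.strptime(s, "%Y-%m-%dT%H:%M:%S.%f")

def persistence_waits(text):
    pending = {}  # pid -> (vbucket, checkpoint, start timestamp)
    for m in EVENT.finditer(text):
        if m["done"]:
            if m["pid"] in pending:
                vb, ckpt, started = pending.pop(m["pid"])
                yield vb, ckpt, (parse_ts(m["ts"]) - started).total_seconds()
        else:
            pending[m["pid"]] = (int(m["vb"]), int(m["ckpt"]), parse_ts(m["ts"]))

if __name__ == "__main__":
    text = open("ns_server.debug.log", errors="replace").read()
    for vb, ckpt, secs in persistence_waits(text):
        print(f"vbucket {vb}: checkpoint {ckpt} persisted after {secs:.3f}s")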
[rebalance:debug,2014-08-19T16:50:16.750,ns_1@10.242.238.90:<0.22384.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 975 [rebalance:debug,2014-08-19T16:50:16.751,ns_1@10.242.238.90:<0.22381.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.751,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22381.0> (ok) [rebalance:debug,2014-08-19T16:50:16.752,ns_1@10.242.238.90:<0.22384.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.752,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22384.0> (ok) [rebalance:debug,2014-08-19T16:50:16.842,ns_1@10.242.238.90:<0.22387.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 978 [rebalance:debug,2014-08-19T16:50:16.842,ns_1@10.242.238.90:<0.22390.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 977 [rebalance:debug,2014-08-19T16:50:16.843,ns_1@10.242.238.90:<0.22387.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.843,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22387.0> (ok) [rebalance:debug,2014-08-19T16:50:16.844,ns_1@10.242.238.90:<0.22390.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.844,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22390.0> (ok) [rebalance:debug,2014-08-19T16:50:16.958,ns_1@10.242.238.90:<0.22393.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 979 [rebalance:debug,2014-08-19T16:50:16.959,ns_1@10.242.238.90:<0.22393.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:16.959,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22393.0> (ok) [ns_server:info,2014-08-19T16:50:17.497,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.91' [ns_server:debug,2014-08-19T16:50:17.959,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:17.967,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7418 us [ns_server:debug,2014-08-19T16:50:17.967,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:17.968,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:17.969,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{464, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
[rebalance:debug,2014-08-19T16:50:17.972,ns_1@10.242.238.90:<0.22406.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 704 [rebalance:debug,2014-08-19T16:50:17.973,ns_1@10.242.238.90:<0.22406.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:17.973,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22406.0> (ok) [rebalance:debug,2014-08-19T16:50:18.001,ns_1@10.242.238.90:<0.22409.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 706 [ns_server:debug,2014-08-19T16:50:18.001,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:18.003,ns_1@10.242.238.90:<0.22409.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.003,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22409.0> (ok) [ns_server:debug,2014-08-19T16:50:18.006,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.006,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4622 us [ns_server:debug,2014-08-19T16:50:18.006,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.007,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{466, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.045,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:18.046,ns_1@10.242.238.90:<0.22413.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 705 [rebalance:debug,2014-08-19T16:50:18.046,ns_1@10.242.238.90:<0.22416.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 708 [rebalance:debug,2014-08-19T16:50:18.047,ns_1@10.242.238.90:<0.22416.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.047,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22416.0> (ok) [rebalance:debug,2014-08-19T16:50:18.048,ns_1@10.242.238.90:<0.22413.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.048,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22413.0> (ok) [ns_server:debug,2014-08-19T16:50:18.048,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3214 us [ns_server:debug,2014-08-19T16:50:18.048,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing 
replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.049,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.049,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{465, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.065,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:18.069,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.070,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4758 us [ns_server:debug,2014-08-19T16:50:18.070,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.071,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{467, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.085,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:18.088,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2501 us [ns_server:debug,2014-08-19T16:50:18.088,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.088,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.089,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{469, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
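Each "config change: buckets ->" record above carries the bucket proplist with a one-entry map diff such as {464, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}. The sketch below simply extracts those triples (vbucket id plus the two node chains printed with it); treating the two chains as before/after replication chains is an assumption about this log line, and ns_server.debug.log is a placeholder path.

import re

# {VBucketId, [node list], [node list]} as printed inside the buckets map diff
MAP_ENTRY = re.compile(r"\{(\d+),\s*\[([^\]]*)\],\s*\[([^\]]*)\]\}")

def chain(nodes_text):
    """Split "'ns_1@10.242.238.88',undefined" into a Python list of names."""
    return [n.strip().strip("'") for n in nodes_text.split(",") if n.strip()]

def map_changes(log_text):
    for vb, first, second in MAP_ENTRY.findall(log_text):
        yield int(vb), chain(first), chain(second)

if __name__ == "__main__":
    text = open("ns_server.debug.log", errors="replace").read()
    for vb, a, b in map_changes(text):
        print(f"vbucket {vb}: {a} -> {b}")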
[ns_server:debug,2014-08-19T16:50:18.115,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:18.119,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.119,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3814 us [ns_server:debug,2014-08-19T16:50:18.120,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.121,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{468, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:18.145,ns_1@10.242.238.90:<0.22422.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 710 [rebalance:debug,2014-08-19T16:50:18.145,ns_1@10.242.238.90:<0.22423.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 707 [rebalance:debug,2014-08-19T16:50:18.147,ns_1@10.242.238.90:<0.22423.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:18.147,ns_1@10.242.238.90:<0.22422.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.147,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22423.0> (ok) [ns_server:debug,2014-08-19T16:50:18.147,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22422.0> (ok) [rebalance:debug,2014-08-19T16:50:18.256,ns_1@10.242.238.90:<0.22428.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 712 [rebalance:debug,2014-08-19T16:50:18.256,ns_1@10.242.238.90:<0.22431.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 709 [rebalance:debug,2014-08-19T16:50:18.257,ns_1@10.242.238.90:<0.22428.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.257,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22428.0> (ok) [rebalance:debug,2014-08-19T16:50:18.258,ns_1@10.242.238.90:<0.22431.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.258,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22431.0> (ok) [rebalance:debug,2014-08-19T16:50:18.338,ns_1@10.242.238.90:<0.22437.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 714 [rebalance:debug,2014-08-19T16:50:18.338,ns_1@10.242.238.90:<0.22440.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 711 [rebalance:debug,2014-08-19T16:50:18.339,ns_1@10.242.238.90:<0.22437.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:50:18.339,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22437.0> (ok) [rebalance:debug,2014-08-19T16:50:18.339,ns_1@10.242.238.90:<0.22440.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.339,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22440.0> (ok) [rebalance:debug,2014-08-19T16:50:18.405,ns_1@10.242.238.90:<0.22448.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 713 [rebalance:debug,2014-08-19T16:50:18.405,ns_1@10.242.238.90:<0.22451.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 716 [rebalance:debug,2014-08-19T16:50:18.406,ns_1@10.242.238.90:<0.22451.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.406,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22451.0> (ok) [rebalance:debug,2014-08-19T16:50:18.406,ns_1@10.242.238.90:<0.22448.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.407,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22448.0> (ok) [ns_server:debug,2014-08-19T16:50:18.486,ns_1@10.242.238.90:<0.22458.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 718) [ns_server:debug,2014-08-19T16:50:18.486,ns_1@10.242.238.90:<0.22458.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:18.486,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22457.0> (ok) [rebalance:debug,2014-08-19T16:50:18.487,ns_1@10.242.238.90:<0.21681.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:18.487,ns_1@10.242.238.90:<0.21681.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:18.487,ns_1@10.242.238.90:<0.22459.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:18.487,ns_1@10.242.238.90:<0.22459.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:18.487,ns_1@10.242.238.90:<0.21681.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:50:18.488,ns_1@10.242.238.90:<0.22460.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 715 [rebalance:debug,2014-08-19T16:50:18.489,ns_1@10.242.238.90:<0.22460.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.489,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22460.0> (ok) [ns_server:debug,2014-08-19T16:50:18.519,ns_1@10.242.238.90:<0.22464.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 720) [ns_server:debug,2014-08-19T16:50:18.520,ns_1@10.242.238.90:<0.22464.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:18.520,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22463.0> (ok) [rebalance:debug,2014-08-19T16:50:18.520,ns_1@10.242.238.90:<0.21616.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:18.521,ns_1@10.242.238.90:<0.21616.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:18.521,ns_1@10.242.238.90:<0.22465.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:18.521,ns_1@10.242.238.90:<0.22465.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:50:18.521,ns_1@10.242.238.90:<0.22466.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 717 [rebalance:info,2014-08-19T16:50:18.521,ns_1@10.242.238.90:<0.21616.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:50:18.522,ns_1@10.242.238.90:<0.22466.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.522,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22466.0> (ok) [ns_server:info,2014-08-19T16:50:18.537,ns_1@10.242.238.90:<0.18787.0>:ns_memcached:do_handle_call:527]Changed vbucket 718 state to active [ns_server:debug,2014-08-19T16:50:18.559,ns_1@10.242.238.90:<0.22470.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 722) [ns_server:debug,2014-08-19T16:50:18.559,ns_1@10.242.238.90:<0.22470.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:18.559,ns_1@10.242.238.90:<0.22472.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 721) [ns_server:debug,2014-08-19T16:50:18.559,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22469.0> (ok) [ns_server:debug,2014-08-19T16:50:18.559,ns_1@10.242.238.90:<0.22472.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:18.559,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22471.0> (ok) [ns_server:debug,2014-08-19T16:50:18.559,ns_1@10.242.238.90:<0.22474.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 724) [ns_server:debug,2014-08-19T16:50:18.560,ns_1@10.242.238.90:<0.22474.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:18.560,ns_1@10.242.238.90:<0.22477.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 723) [ns_server:debug,2014-08-19T16:50:18.560,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22473.0> (ok) [ns_server:debug,2014-08-19T16:50:18.560,ns_1@10.242.238.90:<0.22477.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:18.560,ns_1@10.242.238.90:<0.22478.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 719) [ns_server:debug,2014-08-19T16:50:18.560,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22475.0> (ok) [ns_server:debug,2014-08-19T16:50:18.560,ns_1@10.242.238.90:<0.22478.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:18.560,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22476.0> (ok) [rebalance:debug,2014-08-19T16:50:18.561,ns_1@10.242.238.90:<0.21566.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:18.561,ns_1@10.242.238.90:<0.21541.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:18.561,ns_1@10.242.238.90:<0.21656.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:18.561,ns_1@10.242.238.90:<0.21566.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:50:18.561,ns_1@10.242.238.90:<0.21516.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
[ns_server:debug,2014-08-19T16:50:18.561,ns_1@10.242.238.90:<0.21541.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:18.561,ns_1@10.242.238.90:<0.22480.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:18.561,ns_1@10.242.238.90:<0.22479.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:18.561,ns_1@10.242.238.90:<0.21591.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:18.561,ns_1@10.242.238.90:<0.22480.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:18.561,ns_1@10.242.238.90:<0.21656.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.22479.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.21591.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.21516.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.22483.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.22482.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.21541.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.22481.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.21566.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.22482.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.22483.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.22481.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.21516.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.21656.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.21591.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
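The shutdown sequence above repeats per replicator: an ebucketmigrator_srv process logs "Dying with reason: shutdown", an opaque confirmation message is sent downstream, and the dying process finally logs "Got close ack!". A quick consistency check over a saved log, sketched below with the same placeholder path, lists any migrator pid that announced shutdown but never reported the close ack.

import re

HANDSHAKE = re.compile(
    r"(?P<pid><\d+\.\d+\.\d+>):ebucketmigrator_srv:[\w:]+\]"
    r"(?P<msg>Dying with reason: shutdown|Got close ack!)")

def unacked_migrators(text):
    dying, acked = set(), set()
    for m in HANDSHAKE.finditer(text):
        (dying if m["msg"].startswith("Dying") else acked).add(m["pid"])
    return dying - acked

if __name__ == "__main__":
    text = open("ns_server.debug.log", errors="replace").read()
    missing = unacked_migrators(text)
    print("migrators without a close ack:", sorted(missing) or "none")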
[rebalance:debug,2014-08-19T16:50:18.562,ns_1@10.242.238.90:<0.22484.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 960 [ns_server:debug,2014-08-19T16:50:18.564,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:18.571,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 720 state to active [ns_server:debug,2014-08-19T16:50:18.572,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7886 us [ns_server:debug,2014-08-19T16:50:18.572,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.573,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.573,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{718, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.599,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.599,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:18.600,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 9 us [ns_server:debug,2014-08-19T16:50:18.600,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.600,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{720, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:18.605,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/718. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:18.605,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",718,active,1} [rebalance:debug,2014-08-19T16:50:18.606,ns_1@10.242.238.90:<0.22484.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.606,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22484.0> (ok) [ns_server:info,2014-08-19T16:50:18.610,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 723 state to active [ns_server:info,2014-08-19T16:50:18.631,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 719 state to active [ns_server:debug,2014-08-19T16:50:18.632,ns_1@10.242.238.90:<0.22490.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 725) [ns_server:debug,2014-08-19T16:50:18.632,ns_1@10.242.238.90:<0.22490.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:18.632,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22489.0> (ok) [rebalance:debug,2014-08-19T16:50:18.633,ns_1@10.242.238.90:<0.21485.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:18.633,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:18.634,ns_1@10.242.238.90:<0.22491.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 962 [ns_server:debug,2014-08-19T16:50:18.634,ns_1@10.242.238.90:<0.21485.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:18.634,ns_1@10.242.238.90:<0.22492.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:18.634,ns_1@10.242.238.90:<0.22492.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:18.634,ns_1@10.242.238.90:<0.21485.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
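The ns_config_rep records above already include their own timing ("Fully synchronized config in N us"), so pulling those numbers out is enough to spot slow config synchronizations during the rebalance. A minimal sketch, with the same placeholder log path as before:

import re

SYNC_US = re.compile(r"Fully synchronized config in (\d+) us")

def sync_times_us(path):
    return [int(m.group(1))
            for line in open(path, errors="replace")
            for m in [SYNC_US.search(line)] if m]

if __name__ == "__main__":
    times = sync_times_us("ns_server.debug.log")
    if times:
        print(f"{len(times)} syncs, max {max(times)} us, "
              f"mean {sum(times) / len(times):.0f} us")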
[ns_server:debug,2014-08-19T16:50:18.636,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.637,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3067 us [ns_server:debug,2014-08-19T16:50:18.637,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.638,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{723, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:18.642,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 721 state to active [ns_server:debug,2014-08-19T16:50:18.654,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:18.658,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3275 us [ns_server:debug,2014-08-19T16:50:18.658,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.658,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.659,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{719, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:18.665,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 722 state to active [ns_server:debug,2014-08-19T16:50:18.679,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:18.681,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1128 us [ns_server:debug,2014-08-19T16:50:18.681,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:18.681,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/720. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:18.681,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.681,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",720,active,1} [ns_server:debug,2014-08-19T16:50:18.682,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{721, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:18.682,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 724 state to active [rebalance:debug,2014-08-19T16:50:18.684,ns_1@10.242.238.90:<0.22497.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 964 [rebalance:debug,2014-08-19T16:50:18.684,ns_1@10.242.238.90:<0.22500.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 961 [ns_server:info,2014-08-19T16:50:18.698,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 725 state to active [ns_server:debug,2014-08-19T16:50:18.703,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:18.707,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.707,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3143 us [ns_server:debug,2014-08-19T16:50:18.707,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.708,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{722, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:18.721,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/723. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:18.721,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",723,active,1} [ns_server:debug,2014-08-19T16:50:18.724,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:18.732,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8170 us [ns_server:debug,2014-08-19T16:50:18.732,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.733,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.733,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{724, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.750,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:50:18.755,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/721. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:18.755,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",721,active,1} [ns_server:debug,2014-08-19T16:50:18.755,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.755,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4917 us [ns_server:debug,2014-08-19T16:50:18.756,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.756,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{725, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:18.788,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/719. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:18.788,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",719,active,1} [rebalance:debug,2014-08-19T16:50:18.808,ns_1@10.242.238.90:<0.22506.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 966 [rebalance:debug,2014-08-19T16:50:18.808,ns_1@10.242.238.90:<0.22507.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 963 [views:debug,2014-08-19T16:50:18.839,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/725. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:18.839,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",725,active,1} [views:debug,2014-08-19T16:50:18.897,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/724. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:18.897,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",724,active,1} [rebalance:debug,2014-08-19T16:50:18.910,ns_1@10.242.238.90:<0.22512.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 968 [rebalance:debug,2014-08-19T16:50:18.910,ns_1@10.242.238.90:<0.22515.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 965 [views:debug,2014-08-19T16:50:18.932,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/722. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:18.932,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",722,active,1} [rebalance:debug,2014-08-19T16:50:18.933,ns_1@10.242.238.90:<0.22491.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:18.933,ns_1@10.242.238.90:<0.22500.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.933,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22491.0> (ok) [rebalance:debug,2014-08-19T16:50:18.933,ns_1@10.242.238.90:<0.22507.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:18.933,ns_1@10.242.238.90:<0.22497.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.933,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22500.0> (ok) [rebalance:debug,2014-08-19T16:50:18.933,ns_1@10.242.238.90:<0.22506.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:18.933,ns_1@10.242.238.90:<0.22515.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.933,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22507.0> (ok) [rebalance:debug,2014-08-19T16:50:18.933,ns_1@10.242.238.90:<0.22512.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:18.934,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22497.0> (ok) [ns_server:debug,2014-08-19T16:50:18.934,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done 
message from subprocess: <0.22506.0> (ok) [ns_server:debug,2014-08-19T16:50:18.934,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22515.0> (ok) [ns_server:debug,2014-08-19T16:50:18.934,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22512.0> (ok) [rebalance:debug,2014-08-19T16:50:19.025,ns_1@10.242.238.90:<0.22518.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 970 [rebalance:debug,2014-08-19T16:50:19.026,ns_1@10.242.238.90:<0.22521.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 967 [rebalance:debug,2014-08-19T16:50:19.027,ns_1@10.242.238.90:<0.22518.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:19.027,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22518.0> (ok) [rebalance:debug,2014-08-19T16:50:19.027,ns_1@10.242.238.90:<0.22521.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:19.027,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22521.0> (ok) [rebalance:debug,2014-08-19T16:50:19.142,ns_1@10.242.238.90:<0.22524.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 969 [rebalance:debug,2014-08-19T16:50:19.143,ns_1@10.242.238.90:<0.22524.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:19.143,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22524.0> (ok) [rebalance:debug,2014-08-19T16:50:19.187,ns_1@10.242.238.90:<0.21676.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.188,ns_1@10.242.238.90:<0.21676.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.188,ns_1@10.242.238.90:<0.22527.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.188,ns_1@10.242.238.90:<0.22527.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.188,ns_1@10.242.238.90:<0.21676.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:50:19.192,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 972 state to replica [ns_server:info,2014-08-19T16:50:19.192,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [972,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([972], []) [ns_server:debug,2014-08-19T16:50:19.193,ns_1@10.242.238.90:<0.22528.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [972,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.249014>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[972,980,981,982,983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:19.193,ns_1@10.242.238.90:<0.22528.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.21456.0> [ns_server:info,2014-08-19T16:50:19.194,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:19.207,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{972,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:19.208,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:19.208,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:19.208,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:50:19.209,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:19.209,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:19.209,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.209,ns_1@10.242.238.90:<0.22530.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.209,ns_1@10.242.238.90:<0.22530.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.209,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:19.209,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:19.209,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:19.209,ns_1@10.242.238.90:<0.21456.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:19.209,ns_1@10.242.238.90:<0.22528.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.21456.0> [ns_server:debug,2014-08-19T16:50:19.210,ns_1@10.242.238.90:<0.22528.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:19.210,ns_1@10.242.238.90:<0.22532.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:19.210,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.21456.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.21457.0>,<<"cut off">>,<<"cut off">>,[],136,false,false,0, {1408,452619,208725}, completed, {<0.22528.0>,#Ref<0.0.0.249027>}, <<"replication_ns_1@10.242.238.90">>,<0.21456.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:19.210,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22528.0>,{#Ref<0.0.0.249016>,<0.22532.0>}} [rebalance:debug,2014-08-19T16:50:19.211,ns_1@10.242.238.90:<0.22533.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 971 [error_logger:info,2014-08-19T16:50:19.210,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22532.0>}, {name, {new_child_id, [972,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [972,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:50:19.212,ns_1@10.242.238.90:<0.22533.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:19.213,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22533.0> (ok) [ns_server:debug,2014-08-19T16:50:19.217,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.220,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2359 us [ns_server:debug,2014-08-19T16:50:19.220,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.220,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.221,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{972, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.223,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[972,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:19.224,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:init:640]upstream_sender pid: 
<0.22537.0> [rebalance:debug,2014-08-19T16:50:19.236,ns_1@10.242.238.90:<0.21611.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.236,ns_1@10.242.238.90:<0.21611.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.236,ns_1@10.242.238.90:<0.22538.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.237,ns_1@10.242.238.90:<0.22538.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.237,ns_1@10.242.238.90:<0.21611.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:19.240,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 974 state to replica [ns_server:info,2014-08-19T16:50:19.240,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [972,974,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([974], []) [ns_server:debug,2014-08-19T16:50:19.241,ns_1@10.242.238.90:<0.22539.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [972,974,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.249180>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[972,974,980,981,982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:19.241,ns_1@10.242.238.90:<0.22539.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22532.0> [ns_server:info,2014-08-19T16:50:19.241,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:19.250,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{972,1}, {974,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:19.251,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
[ns_server:info,2014-08-19T16:50:19.251,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:19.251,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:19.251,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:19.251,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:19.251,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.251,ns_1@10.242.238.90:<0.22541.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.251,ns_1@10.242.238.90:<0.22541.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.252,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:19.252,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:19.252,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:19.252,ns_1@10.242.238.90:<0.22532.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:19.252,ns_1@10.242.238.90:<0.22539.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22532.0> [ns_server:debug,2014-08-19T16:50:19.252,ns_1@10.242.238.90:<0.22539.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:19.252,ns_1@10.242.238.90:<0.22543.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:19.253,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22532.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22537.0>,<<"cut off">>,<<"cut off">>,[],139,false,false,0, {1408,452619,251395}, completed, {<0.22539.0>,#Ref<0.0.0.249193>}, <<"replication_ns_1@10.242.238.90">>,<0.22532.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:19.253,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22539.0>,{#Ref<0.0.0.249182>,<0.22543.0>}} [error_logger:info,2014-08-19T16:50:19.253,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22543.0>}, {name, {new_child_id, [972,974,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [972,974,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:19.257,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.261,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.261,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3632 us [ns_server:debug,2014-08-19T16:50:19.261,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[972,974,980,981,982,983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:19.261,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22545.0> [ns_server:debug,2014-08-19T16:50:19.261,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.262,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{974, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:19.291,ns_1@10.242.238.90:<0.21561.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.292,ns_1@10.242.238.90:<0.21561.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack 
[ns_server:debug,2014-08-19T16:50:19.292,ns_1@10.242.238.90:<0.22546.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.292,ns_1@10.242.238.90:<0.22546.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.292,ns_1@10.242.238.90:<0.21561.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:19.295,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 976 state to replica [ns_server:info,2014-08-19T16:50:19.296,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [972,974,976,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([976], []) [ns_server:debug,2014-08-19T16:50:19.297,ns_1@10.242.238.90:<0.22547.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [972,974,976,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.249340>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[972,974,976,980,981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:19.297,ns_1@10.242.238.90:<0.22547.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22543.0> [ns_server:info,2014-08-19T16:50:19.297,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:50:19.308,ns_1@10.242.238.90:<0.21636.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.308,ns_1@10.242.238.90:<0.21636.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.308,ns_1@10.242.238.90:<0.22549.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.308,ns_1@10.242.238.90:<0.22549.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.308,ns_1@10.242.238.90:<0.21636.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:50:19.311,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{972,1}, {974,1}, {976,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:19.312,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:19.312,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:19.312,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:19.313,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:19.313,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:19.313,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.313,ns_1@10.242.238.90:<0.22550.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.313,ns_1@10.242.238.90:<0.22550.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.313,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:19.313,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:19.313,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:19.313,ns_1@10.242.238.90:<0.22543.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:19.313,ns_1@10.242.238.90:<0.22547.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22543.0> [ns_server:debug,2014-08-19T16:50:19.314,ns_1@10.242.238.90:<0.22547.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:19.314,ns_1@10.242.238.90:<0.22552.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:19.314,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22543.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22545.0>,<<"cut off">>,<<"cut off">>,[],142,false,false,0, {1408,452619,312758}, completed, {<0.22547.0>,#Ref<0.0.0.249353>}, <<"replication_ns_1@10.242.238.90">>,<0.22543.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:19.314,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22547.0>,{#Ref<0.0.0.249342>,<0.22552.0>}} [error_logger:info,2014-08-19T16:50:19.314,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22552.0>}, {name, {new_child_id, [972,974,976,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [972,974,976,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:19.319,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.324,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.326,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{976, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.326,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[972,974,976,980,981,982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:19.326,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22554.0> [ns_server:debug,2014-08-19T16:50:19.326,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6853 us 
[ns_server:debug,2014-08-19T16:50:19.326,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:19.331,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 973 state to replica [ns_server:info,2014-08-19T16:50:19.331,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [972,973,974,976,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([973], []) [ns_server:debug,2014-08-19T16:50:19.332,ns_1@10.242.238.90:<0.22555.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [972,973,974,976,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.249489>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[972,973,974,976,980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:19.332,ns_1@10.242.238.90:<0.22555.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22552.0> [ns_server:info,2014-08-19T16:50:19.332,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:19.341,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{972,1}, {973,1}, {974,1}, {976,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:19.341,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:19.342,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:19.342,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:50:19.342,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:19.342,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:19.342,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.342,ns_1@10.242.238.90:<0.22557.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.342,ns_1@10.242.238.90:<0.22557.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.342,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:19.343,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:19.343,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:19.343,ns_1@10.242.238.90:<0.22552.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:19.343,ns_1@10.242.238.90:<0.22555.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22552.0> [ns_server:debug,2014-08-19T16:50:19.343,ns_1@10.242.238.90:<0.22555.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:19.343,ns_1@10.242.238.90:<0.22559.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:19.343,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22552.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22554.0>,<<"cut off">>,<<"cut off">>,[],145,false,false,0, {1408,452619,342151}, completed, {<0.22555.0>,#Ref<0.0.0.249502>}, <<"replication_ns_1@10.242.238.90">>,<0.22552.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:19.344,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22555.0>,{#Ref<0.0.0.249491>,<0.22559.0>}} [error_logger:info,2014-08-19T16:50:19.344,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22559.0>}, {name, {new_child_id, [972,973,974,976,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [972,973,974,976,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:19.348,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.352,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[972,973,974,976,980,981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:19.352,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22560.0> [ns_server:debug,2014-08-19T16:50:19.353,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.353,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4642 us [ns_server:debug,2014-08-19T16:50:19.353,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.354,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{973, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:19.433,ns_1@10.242.238.90:<0.21586.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.433,ns_1@10.242.238.90:<0.21586.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack 
[ns_server:debug,2014-08-19T16:50:19.433,ns_1@10.242.238.90:<0.22568.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.433,ns_1@10.242.238.90:<0.22568.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.433,ns_1@10.242.238.90:<0.21586.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:19.437,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 975 state to replica [ns_server:info,2014-08-19T16:50:19.437,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [972,973,974,975,976,980,981,982,983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([975], []) [ns_server:debug,2014-08-19T16:50:19.438,ns_1@10.242.238.90:<0.22569.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [972,973,974,975,976,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.249654>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[972,973,974,975,976,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:19.439,ns_1@10.242.238.90:<0.22569.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22559.0> [ns_server:info,2014-08-19T16:50:19.439,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:50:19.448,ns_1@10.242.238.90:<0.21511.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.449,ns_1@10.242.238.90:<0.21511.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.449,ns_1@10.242.238.90:<0.22571.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.449,ns_1@10.242.238.90:<0.22571.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.449,ns_1@10.242.238.90:<0.21511.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:50:19.451,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:19.452,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:19.453,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:19.453,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:19.453,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:19.453,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:19.453,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.453,ns_1@10.242.238.90:<0.22572.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.453,ns_1@10.242.238.90:<0.22572.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.453,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:19.454,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:19.454,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:19.454,ns_1@10.242.238.90:<0.22559.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:19.454,ns_1@10.242.238.90:<0.22569.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22559.0> [ns_server:debug,2014-08-19T16:50:19.454,ns_1@10.242.238.90:<0.22569.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:19.454,ns_1@10.242.238.90:<0.22574.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:19.454,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22559.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22560.0>,<<"cut off">>,<<"cut off">>,[],148,false,false,0, {1408,452619,453179}, completed, {<0.22569.0>,#Ref<0.0.0.249667>}, <<"replication_ns_1@10.242.238.90">>,<0.22559.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:19.455,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22569.0>,{#Ref<0.0.0.249656>,<0.22574.0>}} [error_logger:info,2014-08-19T16:50:19.455,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22574.0>}, {name, {new_child_id, [972,973,974,975,976,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [972,973,974,975,976,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:19.459,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.464,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[972,973,974,975,976,980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:19.466,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22575.0> [ns_server:debug,2014-08-19T16:50:19.467,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.467,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7734 us [ns_server:debug,2014-08-19T16:50:19.468,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.468,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{975, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, 
{servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:19.470,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 978 state to replica [ns_server:info,2014-08-19T16:50:19.470,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [972,973,974,975,976,978,980,981,982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([978], []) [ns_server:debug,2014-08-19T16:50:19.471,ns_1@10.242.238.90:<0.22577.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [972,973,974,975,976,978,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.249810>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[972,973,974,975,976,978,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:19.471,ns_1@10.242.238.90:<0.22577.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22574.0> [ns_server:info,2014-08-19T16:50:19.471,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:19.480,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {978,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:19.481,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:19.481,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:19.481,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:50:19.481,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:19.482,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:19.482,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.482,ns_1@10.242.238.90:<0.22579.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.482,ns_1@10.242.238.90:<0.22579.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.482,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:19.482,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:19.482,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:19.482,ns_1@10.242.238.90:<0.22574.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:19.482,ns_1@10.242.238.90:<0.22577.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22574.0> [ns_server:debug,2014-08-19T16:50:19.483,ns_1@10.242.238.90:<0.22577.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:19.483,ns_1@10.242.238.90:<0.22581.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:19.483,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22574.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22575.0>,<<"cut off">>,<<"cut off">>,[],151,false,false,0, {1408,452619,481659}, completed, {<0.22577.0>,#Ref<0.0.0.249823>}, <<"replication_ns_1@10.242.238.90">>,<0.22574.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:19.483,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22577.0>,{#Ref<0.0.0.249812>,<0.22581.0>}} [error_logger:info,2014-08-19T16:50:19.483,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22581.0>}, {name, {new_child_id, [972,973,974,975,976,978,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [972,973,974,975,976,978,980,981,982,983, 984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:19.489,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.492,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[972,973,974,975,976,978,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:19.493,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22582.0> [ns_server:debug,2014-08-19T16:50:19.495,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.495,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6085 us [ns_server:debug,2014-08-19T16:50:19.496,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.497,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{978, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.607,ns_1@10.242.238.90:<0.22587.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 712) [ns_server:debug,2014-08-19T16:50:19.607,ns_1@10.242.238.90:<0.22587.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired 
[ns_server:debug,2014-08-19T16:50:19.607,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22584.0> (ok) [ns_server:debug,2014-08-19T16:50:19.607,ns_1@10.242.238.90:<0.22589.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 706) [ns_server:debug,2014-08-19T16:50:19.607,ns_1@10.242.238.90:<0.22589.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:19.607,ns_1@10.242.238.90:<0.22593.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 707) [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:<0.22593.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22585.0> (ok) [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22586.0> (ok) [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:<0.22596.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 708) [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:<0.22596.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:<0.22599.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 710) [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22588.0> (ok) [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:<0.22599.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:<0.22601.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 704) [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22590.0> (ok) [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:<0.22601.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:<0.22603.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 709) [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22591.0> (ok) [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:<0.22603.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:<0.22604.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 705) [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:<0.22604.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:<0.22605.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 715) [ns_server:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22592.0> (ok) [rebalance:debug,2014-08-19T16:50:19.608,ns_1@10.242.238.90:<0.21835.0>:ebucketmigrator_srv:terminate:737]Dying 
with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22605.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22606.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 713) [rebalance:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.21998.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22594.0> (ok) [rebalance:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.21973.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22606.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.21948.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.21835.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22607.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 714) [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22595.0> (ok) [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22609.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22607.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.21973.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.21998.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22610.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.21948.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22597.0> (ok) [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22611.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22613.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 716) [rebalance:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.21888.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22612.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22609.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent 
[rebalance:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22038.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22610.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.21923.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22598.0> (ok) [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22611.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22613.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:19.609,ns_1@10.242.238.90:<0.22612.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22018.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22614.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 711) [rebalance:info,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.21973.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.21835.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.21948.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.21998.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.21923.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.21888.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22038.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22616.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22615.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22614.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22617.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22600.0> (ok) [rebalance:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.21756.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22018.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22618.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.21812.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22619.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 717) [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22616.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22615.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22617.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22602.0> (ok) [rebalance:info,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22038.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.21888.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.21923.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22619.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.21756.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22620.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22618.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.21812.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22621.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.22018.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.22620.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22608.0> (ok) [rebalance:debug,2014-08-19T16:50:19.610,ns_1@10.242.238.90:<0.21787.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.22621.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.21756.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.21731.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.21863.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.22622.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.21787.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.21812.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.22622.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.21731.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.22623.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.21863.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.22624.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.21787.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.21706.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.22623.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.22624.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.611,ns_1@10.242.238.90:<0.21731.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:19.612,ns_1@10.242.238.90:<0.21706.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.612,ns_1@10.242.238.90:<0.22625.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:50:19.612,ns_1@10.242.238.90:<0.21863.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:19.612,ns_1@10.242.238.90:<0.22625.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.612,ns_1@10.242.238.90:<0.21706.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:50:19.697,ns_1@10.242.238.90:<0.21536.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.698,ns_1@10.242.238.90:<0.21536.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.698,ns_1@10.242.238.90:<0.22626.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.698,ns_1@10.242.238.90:<0.22626.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.698,ns_1@10.242.238.90:<0.21536.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:50:19.702,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 977 state to replica [ns_server:info,2014-08-19T16:50:19.702,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [972,973,974,975,976,977,978,980,981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([977], []) [ns_server:debug,2014-08-19T16:50:19.703,ns_1@10.242.238.90:<0.22627.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [972,973,974,975,976,977,978,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.250331>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[972,973,974,975,976,977,978,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:19.703,ns_1@10.242.238.90:<0.22627.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22581.0> [ns_server:info,2014-08-19T16:50:19.704,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:19.717,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:19.718,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:19.718,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:19.718,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:50:19.718,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:19.718,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:19.718,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.718,ns_1@10.242.238.90:<0.22629.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.718,ns_1@10.242.238.90:<0.22629.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.719,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:19.719,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:19.719,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:19.719,ns_1@10.242.238.90:<0.22581.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:19.719,ns_1@10.242.238.90:<0.22627.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22581.0> [ns_server:debug,2014-08-19T16:50:19.719,ns_1@10.242.238.90:<0.22627.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:19.719,ns_1@10.242.238.90:<0.22631.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:19.720,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22581.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22582.0>,<<"cut off">>,<<"cut off">>,[],154,false,false,0, {1408,452619,718296}, completed, {<0.22627.0>,#Ref<0.0.0.250344>}, <<"replication_ns_1@10.242.238.90">>,<0.22581.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:19.720,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22627.0>,{#Ref<0.0.0.250333>,<0.22631.0>}} [error_logger:info,2014-08-19T16:50:19.720,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22631.0>}, {name, {new_child_id, [972,973,974,975,976,977,978,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [972,973,974,975,976,977,978,980,981,982, 983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:19.727,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.729,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[972,973,974,975,976,977,978,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:19.729,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22632.0> [ns_server:debug,2014-08-19T16:50:19.730,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.730,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3298 us [ns_server:debug,2014-08-19T16:50:19.730,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.731,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{977, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.754,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3765 us [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{462, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.774,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.777,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.777,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2700 us [ns_server:debug,2014-08-19T16:50:19.777,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.778,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{452, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:19.787,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 708 state to active [ns_server:debug,2014-08-19T16:50:19.806,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.810,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.810,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3362 us [ns_server:debug,2014-08-19T16:50:19.810,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.811,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{448, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, 
{type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:19.820,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 713 state to active [ns_server:debug,2014-08-19T16:50:19.835,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{463, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.833,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.833,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.840,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 19 us [ns_server:debug,2014-08-19T16:50:19.841,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:19.864,ns_1@10.242.238.90:<0.21819.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.864,ns_1@10.242.238.90:<0.21819.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.864,ns_1@10.242.238.90:<0.22638.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.865,ns_1@10.242.238.90:<0.22638.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.865,ns_1@10.242.238.90:<0.21819.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:50:19.865,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/708. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:19.865,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",708,active,1} [ns_server:debug,2014-08-19T16:50:19.870,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.874,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.874,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3938 us [ns_server:debug,2014-08-19T16:50:19.874,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.875,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{450, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:19.895,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 707 state to active [ns_server:debug,2014-08-19T16:50:19.899,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:50:19.900,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/713. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:19.901,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1293 us [ns_server:debug,2014-08-19T16:50:19.901,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",713,active,1} [ns_server:debug,2014-08-19T16:50:19.901,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.901,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.902,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{708, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.918,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.921,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.921,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2924 us [ns_server:debug,2014-08-19T16:50:19.921,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.922,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{461, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:19.932,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/707. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:19.933,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",707,active,1} [rebalance:debug,2014-08-19T16:50:19.937,ns_1@10.242.238.90:<0.21943.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.937,ns_1@10.242.238.90:<0.21943.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.938,ns_1@10.242.238.90:<0.22641.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.938,ns_1@10.242.238.90:<0.22641.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.938,ns_1@10.242.238.90:<0.21943.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:19.941,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.943,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1399 us [ns_server:debug,2014-08-19T16:50:19.943,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.943,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.944,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{713, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.966,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:19.968,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 710 state to active [ns_server:debug,2014-08-19T16:50:19.969,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2695 us [ns_server:debug,2014-08-19T16:50:19.969,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.970,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{455, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
[ns_server:debug,2014-08-19T16:50:19.970,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:19.972,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 966 state to replica [ns_server:info,2014-08-19T16:50:19.973,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [966,972,973,974,975,976,977,978,980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023] ([966], []) [ns_server:debug,2014-08-19T16:50:19.974,ns_1@10.242.238.90:<0.22643.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [966,972,973,974,975,976,977,978,980,981,982, 983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.250831>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[966,972,973,974,975,976,977,978,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:19.974,ns_1@10.242.238.90:<0.22643.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22631.0> [ns_server:info,2014-08-19T16:50:19.974,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:19.986,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{966,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:19.987,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:19.987,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:19.987,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:50:19.988,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:19.988,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:19.988,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:19.988,ns_1@10.242.238.90:<0.22645.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:19.988,ns_1@10.242.238.90:<0.22645.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:19.988,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:19.988,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:19.989,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:19.989,ns_1@10.242.238.90:<0.22631.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:19.989,ns_1@10.242.238.90:<0.22643.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22631.0> [ns_server:debug,2014-08-19T16:50:19.989,ns_1@10.242.238.90:<0.22643.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:19.989,ns_1@10.242.238.90:<0.22647.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:19.989,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22631.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22632.0>,<<"cut off">>,<<"cut off">>,[],157,false,false,0, {1408,452619,987773}, completed, {<0.22643.0>,#Ref<0.0.0.250844>}, <<"replication_ns_1@10.242.238.90">>,<0.22631.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:19.989,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22643.0>,{#Ref<0.0.0.250833>,<0.22647.0>}} [error_logger:info,2014-08-19T16:50:19.989,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22647.0>}, {name, {new_child_id, [966,972,973,974,975,976,977,978,980,981,982, 983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [966,972,973,974,975,976,977,978,980,981, 982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:50:19.990,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 704 state to active [ns_server:debug,2014-08-19T16:50:19.996,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:19.999,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[966,972,973,974,975,976,977,978,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [views:debug,2014-08-19T16:50:20.000,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/710. 
Updated state: active (1) [rebalance:debug,2014-08-19T16:50:20.000,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22648.0> [ns_server:debug,2014-08-19T16:50:20.000,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",710,active,1} [ns_server:debug,2014-08-19T16:50:20.005,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 9122 us [ns_server:debug,2014-08-19T16:50:20.005,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.007,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{966, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.007,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:20.017,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 709 state to active [ns_server:debug,2014-08-19T16:50:20.027,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.031,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.031,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3663 us [ns_server:debug,2014-08-19T16:50:20.031,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.032,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{460, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:20.033,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/704. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:20.033,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",704,active,1} [ns_server:debug,2014-08-19T16:50:20.049,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:20.051,ns_1@10.242.238.90:<0.21968.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.051,ns_1@10.242.238.90:<0.21968.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.051,ns_1@10.242.238.90:<0.22651.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.051,ns_1@10.242.238.90:<0.22651.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.051,ns_1@10.242.238.90:<0.21968.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:20.052,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2748 us [ns_server:debug,2014-08-19T16:50:20.052,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.052,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.053,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{707, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:20.072,ns_1@10.242.238.90:<0.21466.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.072,ns_1@10.242.238.90:<0.21466.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.072,ns_1@10.242.238.90:<0.22653.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.073,ns_1@10.242.238.90:<0.22653.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.074,ns_1@10.242.238.90:<0.21466.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
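[editor's note] The repeated "Sending opaque message to confirm downstream reception" / "Got close ack!" pairs above are the ebucketmigrator shutdown handshake: before an old replicator dies it pushes a marker down the downstream memcached connection (from a short-lived helper pid such as <0.22651.0>) and blocks until the marker is acknowledged, so everything queued ahead of it is known to be consumed. A minimal sketch of that pattern, assuming a plain active-mode TCP socket and made-up marker payloads; the real ebucketmigrator_srv uses memcached TAP opaque packets and different internals:

    -module(close_ack_sketch).
    -export([confirm_sent_messages/1]).

    %% Sketch only: push a marker after everything already queued on Sock, then
    %% block until the peer echoes it back, so we know the downstream side has
    %% consumed every message sent before the marker.
    confirm_sent_messages(Sock) ->
        ok = gen_tcp:send(Sock, <<"opaque-close">>),   %% "Sending opaque message ..."
        receive
            {tcp, Sock, <<"opaque-close-ack">>} ->
                ok;                                    %% "Got close ack!"
            {tcp_closed, Sock} ->
                {error, closed}
        after 30000 ->
            {error, timeout}
        end.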
[ns_server:debug,2014-08-19T16:50:20.080,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:20.082,ns_1@10.242.238.90:<0.21916.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.082,ns_1@10.242.238.90:<0.21916.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.082,ns_1@10.242.238.90:<0.22654.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.082,ns_1@10.242.238.90:<0.22654.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.083,ns_1@10.242.238.90:<0.21916.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:50:20.084,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/709. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:20.084,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",709,active,1} [ns_server:debug,2014-08-19T16:50:20.084,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.085,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4190 us [ns_server:debug,2014-08-19T16:50:20.086,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.086,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{459, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.088,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 962 state to replica [ns_server:info,2014-08-19T16:50:20.088,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [962,966,972,973,974,975,976,977,978,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023] ([962], []) [ns_server:debug,2014-08-19T16:50:20.090,ns_1@10.242.238.90:<0.22655.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [962,966,972,973,974,975,976,977,978,980,981, 982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.251199>} Args:[{"10.242.238.91",11209}, 
{"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[962,966,972,973,974,975,976,977,978,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:20.090,ns_1@10.242.238.90:<0.22655.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22647.0> [ns_server:info,2014-08-19T16:50:20.091,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:20.105,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{962,1}, {966,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:20.106,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:20.106,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:20.106,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:20.106,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:20.106,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:20.106,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.106,ns_1@10.242.238.90:<0.22658.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.107,ns_1@10.242.238.90:<0.22658.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.107,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:20.107,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:20.107,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:20.107,ns_1@10.242.238.90:<0.22647.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:20.107,ns_1@10.242.238.90:<0.22655.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22647.0> [ns_server:debug,2014-08-19T16:50:20.107,ns_1@10.242.238.90:<0.22655.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:20.107,ns_1@10.242.238.90:<0.22660.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:20.108,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22647.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22648.0>,<<"cut off">>,<<"cut off">>,[],160,false,false,0, {1408,452620,106333}, completed, {<0.22655.0>,#Ref<0.0.0.251212>}, <<"replication_ns_1@10.242.238.90">>,<0.22647.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:20.108,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22655.0>,{#Ref<0.0.0.251201>,<0.22660.0>}} [error_logger:info,2014-08-19T16:50:20.108,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22660.0>}, {name, {new_child_id, [962,966,972,973,974,975,976,977,978,980,981, 982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [962,966,972,973,974,975,976,977,978,980, 981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:20.113,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.117,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[962,966,972,973,974,975,976,977,978,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:debug,2014-08-19T16:50:20.118,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4394 us [rebalance:debug,2014-08-19T16:50:20.118,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22661.0> [ns_server:debug,2014-08-19T16:50:20.118,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.118,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing 
replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:20.120,ns_1@10.242.238.90:<0.21993.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.120,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{962, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.120,ns_1@10.242.238.90:<0.21993.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.120,ns_1@10.242.238.90:<0.22662.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.120,ns_1@10.242.238.90:<0.22662.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.121,ns_1@10.242.238.90:<0.21993.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:20.130,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 715 state to active [ns_server:debug,2014-08-19T16:50:20.138,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.142,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{457, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.142,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3473 us [ns_server:debug,2014-08-19T16:50:20.144,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.145,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:20.150,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 705 state to active [views:debug,2014-08-19T16:50:20.158,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/715. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:20.159,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",715,active,1} [ns_server:debug,2014-08-19T16:50:20.164,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:20.168,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 714 state to active [ns_server:debug,2014-08-19T16:50:20.172,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7876 us [ns_server:debug,2014-08-19T16:50:20.173,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.173,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.174,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{710, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:20.188,ns_1@10.242.238.90:<0.21768.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.188,ns_1@10.242.238.90:<0.21768.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.188,ns_1@10.242.238.90:<0.22665.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.188,ns_1@10.242.238.90:<0.22665.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.189,ns_1@10.242.238.90:<0.21768.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
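[editor's note] The "Signaled mc_couch_event: {set_vbucket,"default",715,active,1}" entries show the event tuple that capi_set_view_manager receives when memcached flips a vbucket's state. A hedged sketch of matching that tuple; the handler name is invented and reading the trailing integer as a checkpoint id is an assumption taken from the "Updated state: active (1)" wording, not from the real capi_set_view_manager code:

    -module(mc_couch_event_sketch).
    -export([handle_mc_couch_event/1]).

    %% Sketch: match the event tuple exactly as it is printed in the log above;
    %% the last element is assumed to be a checkpoint id.
    handle_mc_couch_event({set_vbucket, Bucket, VBucket, State, CheckpointId}) ->
        io:format("vbucket ~s/~b is now ~p (checkpoint ~b)~n",
                  [Bucket, VBucket, State, CheckpointId]);
    handle_mc_couch_event(_Other) ->
        ok.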
[ns_server:debug,2014-08-19T16:50:20.190,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.193,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.193,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2977 us [ns_server:debug,2014-08-19T16:50:20.194,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.194,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{704, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:20.204,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/705. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:20.205,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",705,active,1} [rebalance:debug,2014-08-19T16:50:20.210,ns_1@10.242.238.90:<0.21883.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.210,ns_1@10.242.238.90:<0.21883.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.210,ns_1@10.242.238.90:<0.22667.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.211,ns_1@10.242.238.90:<0.22667.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.211,ns_1@10.242.238.90:<0.21883.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:20.215,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.216,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1141 us [ns_server:debug,2014-08-19T16:50:20.217,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.218,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.218,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{709, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.220,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 716 state to active [ns_server:debug,2014-08-19T16:50:20.235,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:20.237,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 706 state to active [ns_server:debug,2014-08-19T16:50:20.238,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.239,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3817 us [ns_server:debug,2014-08-19T16:50:20.239,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.240,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{453, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.253,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 717 state to active [ns_server:debug,2014-08-19T16:50:20.256,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.259,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3007 us [ns_server:debug,2014-08-19T16:50:20.259,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
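[editor's note] Each "config change: buckets ->" entry above carries a one-element map diff of the form {VBucket, OldChain, NewChain} alongside the bucket's static properties. A small sketch of pulling a chain change out of the term exactly as it is printed here; this is the logged diff shape, not necessarily how ns_config stores the full vbucket map:

    -module(bucket_diff_sketch).
    -export([chain_change/3]).

    %% BucketsCfg is the term printed after "config change: buckets ->".
    %% Returns {OldChain, NewChain} for VBucket, or not_found.
    chain_change(BucketsCfg, BucketName, VBucket) ->
        Configs = proplists:get_value(configs, BucketsCfg, []),
        Props   = proplists:get_value(BucketName, Configs, []),
        Map     = proplists:get_value(map, Props, []),
        case lists:keyfind(VBucket, 1, Map) of
            {VBucket, OldChain, NewChain} -> {OldChain, NewChain};
            false                         -> not_found
        end.

For the first entry in the block above, chain_change(Cfg, "default", 709) would return {['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}.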
[ns_server:debug,2014-08-19T16:50:20.260,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.261,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{454, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.266,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 961 state to replica [ns_server:info,2014-08-19T16:50:20.267,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [961,962,966,972,973,974,975,976,977,978,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023] ([961], []) [ns_server:debug,2014-08-19T16:50:20.268,ns_1@10.242.238.90:<0.22670.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [961,962,966,972,973,974,975,976,977,978,980, 981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.251647>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[961,962,966,972,973,974,975,976,977,978,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:20.268,ns_1@10.242.238.90:<0.22670.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22660.0> [ns_server:info,2014-08-19T16:50:20.268,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [views:debug,2014-08-19T16:50:20.271,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/714. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:20.272,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",714,active,1} [rebalance:debug,2014-08-19T16:50:20.277,ns_1@10.242.238.90:<0.21751.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.278,ns_1@10.242.238.90:<0.21751.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.278,ns_1@10.242.238.90:<0.22672.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.278,ns_1@10.242.238.90:<0.22672.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.278,ns_1@10.242.238.90:<0.21751.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:20.283,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{961,1}, {962,1}, {966,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:20.283,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:20.284,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:20.284,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:20.284,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:20.284,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:20.284,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.284,ns_1@10.242.238.90:<0.22673.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.284,ns_1@10.242.238.90:<0.22673.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.284,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
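[editor's note] The tap_replication_manager lines of the form "Going to change replication from 'ns_1@10.242.238.91' to have [...] ([961], [])" print the full target vbucket list followed by what appear to be the added and removed ids. Assuming that reading, the set arithmetic is just an ordered-set union/subtract over the sorted id lists shown in the log:

    -module(replication_set_sketch).
    -export([new_vbucket_list/3]).

    %% Current, Added and Removed are sorted lists of vbucket ids, as printed in
    %% the "Going to change replication ... ([Added], [Removed])" entries.
    new_vbucket_list(Current, Added, Removed) ->
        ordsets:subtract(ordsets:union(Current, Added), Removed).

For example, new_vbucket_list([962,966|Rest], [961], []) yields the [961,962,966|Rest] list that the next vbucket filter change installs.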
[ns_server:debug,2014-08-19T16:50:20.285,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:20.285,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:20.285,ns_1@10.242.238.90:<0.22660.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:20.285,ns_1@10.242.238.90:<0.22670.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22660.0> [ns_server:debug,2014-08-19T16:50:20.285,ns_1@10.242.238.90:<0.22670.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:20.285,ns_1@10.242.238.90:<0.22675.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:20.285,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22660.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22661.0>,<<"cut off">>,<<"cut off">>,[],163,false,false,0, {1408,452620,284082}, completed, {<0.22670.0>,#Ref<0.0.0.251662>}, <<"replication_ns_1@10.242.238.90">>,<0.22660.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:20.286,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22670.0>,{#Ref<0.0.0.251649>,<0.22675.0>}} [error_logger:info,2014-08-19T16:50:20.286,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22675.0>}, {name, {new_child_id, [961,962,966,972,973,974,975,976,977,978,980, 981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [961,962,966,972,973,974,975,976,977,978, 980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:20.291,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.293,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.293,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2708 us [ns_server:debug,2014-08-19T16:50:20.294,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.294,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:init:621]Reusing old upstream: 
[{vbuckets,[961,962,966,972,973,974,975,976,977,978,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:debug,2014-08-19T16:50:20.295,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{961, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:20.295,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22676.0> [ns_server:info,2014-08-19T16:50:20.297,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 979 state to replica [ns_server:info,2014-08-19T16:50:20.297,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [961,962,966,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023] ([979], []) [ns_server:debug,2014-08-19T16:50:20.298,ns_1@10.242.238.90:<0.22678.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [961,962,966,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.251827>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[961,962,966,972,973,974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:20.299,ns_1@10.242.238.90:<0.22678.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22675.0> [ns_server:info,2014-08-19T16:50:20.299,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:20.307,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{961,1}, {962,1}, {966,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, 
{1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:20.308,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:20.308,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:20.308,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:20.309,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:20.309,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:20.309,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.309,ns_1@10.242.238.90:<0.22680.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.309,ns_1@10.242.238.90:<0.22680.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.309,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:20.309,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:20.309,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:20.309,ns_1@10.242.238.90:<0.22675.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:20.309,ns_1@10.242.238.90:<0.22678.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22675.0> [ns_server:debug,2014-08-19T16:50:20.310,ns_1@10.242.238.90:<0.22678.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:20.310,ns_1@10.242.238.90:<0.22682.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:20.310,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22675.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22676.0>,<<"cut off">>,<<"cut off">>,[],166,false,false,0, {1408,452620,308761}, completed, {<0.22678.0>,#Ref<0.0.0.251840>}, <<"replication_ns_1@10.242.238.90">>,<0.22675.0>, {had_backfill,false,undefined,[]}, completed,false}. 
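[editor's note] The filter change ends with the old migrator handing its state to the changer process ("Passed old state to caller", "Sent old state to new instance") and the freshly started child pulling that state back in through its old_state_retriever fun ("Got old ebucketmigrator state from <0.22675.0>"). A hedged sketch of that handoff with invented message names; the real ns_vbm_new_sup / ebucketmigrator_srv protocol differs in detail:

    -module(state_handoff_sketch).
    -export([coordinator_loop/1, make_retriever/1]).

    %% The changer process (cf. <0.22678.0>) sits on the old migrator's state
    %% until the new child asks for it.
    coordinator_loop(OldState) ->
        receive
            {get_old_state, From, Ref} ->
                From ! {Ref, OldState}        %% "Sent old state to new instance"
        after 60000 ->
            timeout
        end.

    %% The new child is started with this closure and calls it from init/1
    %% ("Got vbucket filter change old state").
    make_retriever(Coordinator) ->
        fun () ->
                Ref = make_ref(),
                Coordinator ! {get_old_state, self(), Ref},
                receive
                    {Ref, OldState} -> {ok, OldState}
                after 30000 ->
                    undefined
                end
        end.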
[ns_server:debug,2014-08-19T16:50:20.310,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22678.0>,{#Ref<0.0.0.251829>,<0.22682.0>}} [error_logger:info,2014-08-19T16:50:20.310,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22682.0>}, {name, {new_child_id, [961,962,966,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [961,962,966,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:50:20.311,ns_1@10.242.238.90:<0.21726.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.311,ns_1@10.242.238.90:<0.21726.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.312,ns_1@10.242.238.90:<0.22683.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.312,ns_1@10.242.238.90:<0.22683.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.312,ns_1@10.242.238.90:<0.21726.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:20.315,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.321,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[961,962,966,972,973,974,975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:20.322,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22684.0> [views:debug,2014-08-19T16:50:20.322,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/716. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:20.322,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",716,active,1} [ns_server:debug,2014-08-19T16:50:20.324,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8027 us [ns_server:debug,2014-08-19T16:50:20.324,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.324,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.325,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{979, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.327,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 963 state to replica [ns_server:info,2014-08-19T16:50:20.327,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [961,962,963,966,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023] ([963], []) [ns_server:debug,2014-08-19T16:50:20.328,ns_1@10.242.238.90:<0.22685.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [961,962,963,966,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.251999>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[961,962,963,966,972,973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:20.328,ns_1@10.242.238.90:<0.22685.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22682.0> [ns_server:info,2014-08-19T16:50:20.328,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:20.337,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{961,1}, {962,1}, {963,1}, {966,1}, {972,1}, {973,1}, {974,1}, 
{975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:20.338,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:20.338,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:20.338,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:20.338,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:20.338,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:20.338,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.338,ns_1@10.242.238.90:<0.22688.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.339,ns_1@10.242.238.90:<0.22688.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.339,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:20.339,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:20.339,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:20.339,ns_1@10.242.238.90:<0.22682.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:20.339,ns_1@10.242.238.90:<0.22685.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22682.0> [ns_server:debug,2014-08-19T16:50:20.339,ns_1@10.242.238.90:<0.22685.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:20.339,ns_1@10.242.238.90:<0.22690.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:20.340,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22682.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22684.0>,<<"cut off">>,<<"cut off">>,[],169,false,false,0, {1408,452620,338331}, completed, {<0.22685.0>,#Ref<0.0.0.252013>}, <<"replication_ns_1@10.242.238.90">>,<0.22682.0>, {had_backfill,false,undefined,[]}, completed,false}. 
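[editor's note] The "Changing vbucket filter on tap stream ..." lists above are {VBucketId, CheckpointId} pairs; every entry in this trace carries checkpoint 1. A throwaway sketch of building such a list, with the checkpoint hard-coded purely for illustration (the real code presumably looks each checkpoint up per vbucket):

    -module(tap_filter_sketch).
    -export([filter_pairs/1]).

    %% Pair every vbucket id with a checkpoint id; 1 matches what the log above
    %% shows for all vbuckets and is an assumption, not a rule.
    filter_pairs(VBuckets) ->
        [{VB, 1} || VB <- VBuckets].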
[ns_server:debug,2014-08-19T16:50:20.340,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22685.0>,{#Ref<0.0.0.252001>,<0.22690.0>}} [error_logger:info,2014-08-19T16:50:20.340,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22690.0>}, {name, {new_child_id, [961,962,963,966,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [961,962,963,966,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:50:20.343,ns_1@10.242.238.90:<0.21793.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.344,ns_1@10.242.238.90:<0.21793.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.344,ns_1@10.242.238.90:<0.22691.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.344,ns_1@10.242.238.90:<0.22691.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.344,ns_1@10.242.238.90:<0.21793.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
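[editor's note] The PROGRESS REPORT above lists the fields of the supervisor child spec that 'ns_vbm_new_sup-default' used to start the new ebucketmigrator: name, mfargs, restart_type temporary, shutdown 60000, child_type worker. A sketch of an old-style OTP child-spec tuple carrying those same fields; the argument names are placeholders, not the actual ns_vbm_new_sup code:

    -module(child_spec_sketch).
    -export([spec/3]).

    %% Old-style OTP child spec: {Id, StartMFA, Restart, Shutdown, Type, Modules}.
    spec(VBuckets, SrcNode, StartArgs) ->
        {{new_child_id, VBuckets, SrcNode},             %% {name, {new_child_id, ...}}
         {ebucketmigrator_srv, start_link, StartArgs},  %% {mfargs, ...}
         temporary,                                     %% {restart_type,temporary}
         60000,                                         %% {shutdown,60000}
         worker,                                        %% {child_type,worker}
         [ebucketmigrator_srv]}.                        %% modules (not shown in the report)

A temporary child is never restarted by the supervisor, which is why each filter change has to start a brand-new child under a new id rather than rely on a restart.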
[ns_server:debug,2014-08-19T16:50:20.344,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.348,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.348,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3728 us [ns_server:debug,2014-08-19T16:50:20.349,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.349,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{963, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.353,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[961,962,963,966,972,973,974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:20.354,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22692.0> [ns_server:info,2014-08-19T16:50:20.359,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 712 state to active [ns_server:debug,2014-08-19T16:50:20.368,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.372,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4239 us [ns_server:debug,2014-08-19T16:50:20.373,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.373,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.374,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{451, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.375,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 960 state to replica 
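[editor's note] ns_config_rep reports each full synchronization's wall time in microseconds ("Fully synchronized config in 3728 us", "... in 4239 us"). A trivial sketch of producing such a figure with timer:now_diff/2, which returns microseconds between two os:timestamp/0 tuples of the {MegaSecs,Secs,MicroSecs} shape also visible in the migrator state dumps above:

    -module(us_timing_sketch).
    -export([timed/1]).

    %% Run Fun and return {Result, ElapsedMicroseconds}; microseconds is the
    %% unit used by the "Fully synchronized config in N us" lines.
    timed(Fun) ->
        Start = os:timestamp(),
        Result = Fun(),
        {Result, timer:now_diff(os:timestamp(), Start)}.

timer:tc(Fun) yields the same information directly as {Microseconds, Result}.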
[ns_server:info,2014-08-19T16:50:20.375,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [960,961,962,963,966,972,973,974,975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023] ([960], []) [ns_server:debug,2014-08-19T16:50:20.378,ns_1@10.242.238.90:<0.22694.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [960,961,962,963,966,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.252192>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[960,961,962,963,966,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:20.378,ns_1@10.242.238.90:<0.22694.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22690.0> [ns_server:info,2014-08-19T16:50:20.379,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:50:20.379,ns_1@10.242.238.90:<0.21844.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.379,ns_1@10.242.238.90:<0.21844.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.379,ns_1@10.242.238.90:<0.22696.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.379,ns_1@10.242.238.90:<0.22696.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.380,ns_1@10.242.238.90:<0.21844.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:50:20.389,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/706. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:20.389,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",706,active,1} [ns_server:info,2014-08-19T16:50:20.392,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{960,1}, {961,1}, {962,1}, {963,1}, {966,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:20.393,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:20.393,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:20.393,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:20.393,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:20.393,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:20.393,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.393,ns_1@10.242.238.90:<0.22697.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.394,ns_1@10.242.238.90:<0.22697.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.394,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:20.394,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:20.394,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:20.394,ns_1@10.242.238.90:<0.22690.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:20.394,ns_1@10.242.238.90:<0.22694.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22690.0> [ns_server:debug,2014-08-19T16:50:20.394,ns_1@10.242.238.90:<0.22694.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:20.395,ns_1@10.242.238.90:<0.22699.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:20.395,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22690.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22692.0>,<<"cut off">>,<<"cut off">>,[],172,false,false,0, {1408,452620,393297}, completed, {<0.22694.0>,#Ref<0.0.0.252205>}, <<"replication_ns_1@10.242.238.90">>,<0.22690.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:20.395,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22694.0>,{#Ref<0.0.0.252194>,<0.22699.0>}} [error_logger:info,2014-08-19T16:50:20.395,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22699.0>}, {name, {new_child_id, [960,961,962,963,966,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [960,961,962,963,966,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:50:20.396,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 711 state to active [ns_server:debug,2014-08-19T16:50:20.400,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.405,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.405,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4682 us [ns_server:debug,2014-08-19T16:50:20.406,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.406,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{960, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.406,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[960,961,962,963,966,972,973,974,975,976,977,978,979,980,981,982, 
983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:20.406,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22701.0> [ns_server:debug,2014-08-19T16:50:20.431,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.435,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3077 us [ns_server:debug,2014-08-19T16:50:20.435,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.435,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:20.436,ns_1@10.242.238.90:<0.21701.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.436,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{715, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.436,ns_1@10.242.238.90:<0.21701.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.436,ns_1@10.242.238.90:<0.22703.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.437,ns_1@10.242.238.90:<0.22703.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.438,ns_1@10.242.238.90:<0.21701.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:50:20.456,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/712. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:20.456,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",712,active,1} [ns_server:debug,2014-08-19T16:50:20.461,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.464,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.464,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2754 us [ns_server:debug,2014-08-19T16:50:20.465,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.465,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{705, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.485,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.493,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.493,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7641 us [ns_server:debug,2014-08-19T16:50:20.493,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.494,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{714, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.496,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 968 state to replica [ns_server:info,2014-08-19T16:50:20.496,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [960,961,962,963,966,968,972,973,974,975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023] ([968], []) 
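Each tap_replication_manager entry above states the full target vbucket list for the upstream node plus the delta in parentheses: the trailing "([968], [])" means vbucket 968 was just added to the stream from 'ns_1@10.242.238.91' and nothing was removed. A sketch for pulling those changes out of the text follows; the function and field names are illustrative only.

```python
import ast
import re

# Hedged sketch: pull replication-filter changes out of entries like
#   "Going to change replication from 'ns_1@10.242.238.91' to have [...] ([968], [])"
# where the trailing pair is (vbuckets added, vbuckets removed).
CHANGE = re.compile(
    r"Going to change replication from '(?P<upstream>[^']+)' to have\s*"
    r"(?P<new>\[[\d,\s]*\])\s*\((?P<added>\[[\d,\s]*\]),\s*(?P<removed>\[[\d,\s]*\])\)"
)

def filter_changes(log_blob: str):
    for m in CHANGE.finditer(log_blob):
        yield {
            "upstream": m.group("upstream"),
            "new_vbuckets": ast.literal_eval(m.group("new")),  # full target list
            "added": ast.literal_eval(m.group("added")),       # e.g. [968]
            "removed": ast.literal_eval(m.group("removed")),   # e.g. []
        }
```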
[ns_server:debug,2014-08-19T16:50:20.497,ns_1@10.242.238.90:<0.22706.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [960,961,962,963,966,968,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.252498>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[960,961,962,963,966,968,972,973,974,975,976,977,978,979,980, 981,982,983,984,985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:20.497,ns_1@10.242.238.90:<0.22706.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22699.0> [ns_server:info,2014-08-19T16:50:20.497,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:20.511,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{960,1}, {961,1}, {962,1}, {963,1}, {966,1}, {968,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:20.512,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:20.512,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:20.512,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:50:20.512,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:20.512,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:20.512,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.512,ns_1@10.242.238.90:<0.22708.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.512,ns_1@10.242.238.90:<0.22708.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.512,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:20.513,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:20.513,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:20.513,ns_1@10.242.238.90:<0.22699.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:20.513,ns_1@10.242.238.90:<0.22706.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22699.0> [ns_server:debug,2014-08-19T16:50:20.513,ns_1@10.242.238.90:<0.22706.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:20.513,ns_1@10.242.238.90:<0.22710.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:20.513,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22699.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22701.0>,<<"cut off">>,<<"cut off">>,[],175,false,false,0, {1408,452620,512201}, completed, {<0.22706.0>,#Ref<0.0.0.252511>}, <<"replication_ns_1@10.242.238.90">>,<0.22699.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:20.514,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22706.0>,{#Ref<0.0.0.252500>,<0.22710.0>}} [error_logger:info,2014-08-19T16:50:20.514,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22710.0>}, {name, {new_child_id, [960,961,962,963,966,968,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [960,961,962,963,966,968,972,973,974,975, 976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:20.518,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.521,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.521,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1660 us [ns_server:debug,2014-08-19T16:50:20.522,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.522,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{968, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.524,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 964 state to replica [ns_server:info,2014-08-19T16:50:20.524,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [960,961,962,963,964,966,968,972,973,974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023] ([964], []) [ns_server:debug,2014-08-19T16:50:20.528,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[960,961,962,963,966,968,972,973,974,975,976,977,978,979,980,981, 
982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:20.528,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22713.0> [ns_server:debug,2014-08-19T16:50:20.528,ns_1@10.242.238.90:<0.22711.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [960,961,962,963,964,966,968,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.252624>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[960,961,962,963,964,966,968,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:20.529,ns_1@10.242.238.90:<0.22711.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22710.0> [ns_server:info,2014-08-19T16:50:20.529,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [views:debug,2014-08-19T16:50:20.531,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/717. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:20.531,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",717,active,1} [ns_server:info,2014-08-19T16:50:20.537,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {966,1}, {968,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:20.538,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:20.538,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:20.538,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:50:20.539,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:20.539,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:20.539,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.539,ns_1@10.242.238.90:<0.22715.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.539,ns_1@10.242.238.90:<0.22715.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.539,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:20.539,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:20.539,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:20.539,ns_1@10.242.238.90:<0.22710.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:20.539,ns_1@10.242.238.90:<0.22711.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22710.0> [ns_server:debug,2014-08-19T16:50:20.540,ns_1@10.242.238.90:<0.22711.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:20.540,ns_1@10.242.238.90:<0.22717.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:20.540,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22710.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22713.0>,<<"cut off">>,<<"cut off">>,[],178,false,false,0, {1408,452620,538763}, completed, {<0.22711.0>,#Ref<0.0.0.252645>}, <<"replication_ns_1@10.242.238.90">>,<0.22710.0>, {had_backfill,false,undefined,[]}, completed,false}. 
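The entries above trace how a filter change retires the old ebucketmigrator: it changes the TAP filter in place, silences its upstream sender, confirms the downstream with an opaque message, hands its port/socket state to the replacement (here <0.22717.0>), and dies; the replacement then reuses the old upstream. The following is a hypothetical textual sanity check, not a Couchbase tool, that the milestone messages for one hand-off appear in the expected order in a slice of this log.

```python
# Hedged sketch: check that one filter-change hand-off (as logged above) hit its
# milestones in order. Markers are literal message fragments from this capture;
# the check is purely textual and does not track process ids.
MILESTONES = [
    "Starting new-style vbucket filter change",
    "Successfully changed vbucket filter on tap stream",
    "Silencing upstream sender",
    "Completing state transition to a new ebucketmigrator",
    "Got close ack!",
    "Passed old state to caller",
    "Got old ebucketmigrator state from",
]

def handoff_in_order(log_slice: str) -> bool:
    """True if every milestone appears, in order, somewhere in the given slice."""
    position = 0
    for marker in MILESTONES:
        index = log_slice.find(marker, position)
        if index == -1:
            return False
        position = index + len(marker)
    return True
```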
[ns_server:debug,2014-08-19T16:50:20.540,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22711.0>,{#Ref<0.0.0.252626>,<0.22717.0>}} [error_logger:info,2014-08-19T16:50:20.540,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22717.0>}, {name, {new_child_id, [960,961,962,963,964,966,968,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [960,961,962,963,964,966,968,972,973,974, 975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:20.545,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.549,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4151 us [ns_server:debug,2014-08-19T16:50:20.550,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[960,961,962,963,964,966,968,972,973,974,975,976,977,978,979,980, 981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:20.550,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22718.0> [ns_server:debug,2014-08-19T16:50:20.551,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.552,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.553,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{964, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.568,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:50:20.571,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.571,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1476 us [ns_server:debug,2014-08-19T16:50:20.573,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{716, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.574,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.594,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.597,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2962 us [ns_server:debug,2014-08-19T16:50:20.597,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.597,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:20.597,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/711. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:20.598,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",711,active,1} [ns_server:debug,2014-08-19T16:50:20.598,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{706, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.620,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.623,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.623,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2717 us [ns_server:debug,2014-08-19T16:50:20.623,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.624,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{717, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.626,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 969 state to replica [ns_server:info,2014-08-19T16:50:20.626,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [960,961,962,963,964,966,968,969,972,973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023] ([969], []) [ns_server:debug,2014-08-19T16:50:20.627,ns_1@10.242.238.90:<0.22722.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [960,961,962,963,964,966,968,969,972,973,974, 975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.252912>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[960,961,962,963,964,966,968,969,972,973,974,975,976,977,978, 
979,980,981,982,983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:20.627,ns_1@10.242.238.90:<0.22722.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22717.0> [ns_server:info,2014-08-19T16:50:20.627,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:20.642,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {966,1}, {968,1}, {969,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:20.643,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:20.643,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:20.643,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:20.644,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:20.644,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:20.644,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.644,ns_1@10.242.238.90:<0.22724.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.644,ns_1@10.242.238.90:<0.22724.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.644,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:20.644,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:20.644,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:20.644,ns_1@10.242.238.90:<0.22717.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:20.644,ns_1@10.242.238.90:<0.22722.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22717.0> [ns_server:debug,2014-08-19T16:50:20.645,ns_1@10.242.238.90:<0.22722.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:20.645,ns_1@10.242.238.90:<0.22726.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:20.657,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22717.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22718.0>,<<"cut off">>,<<"cut off">>,[],181,false,false,0, {1408,452620,643797}, completed, {<0.22722.0>,#Ref<0.0.0.252925>}, <<"replication_ns_1@10.242.238.90">>,<0.22717.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:20.658,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22722.0>,{#Ref<0.0.0.252914>,<0.22726.0>}} [error_logger:info,2014-08-19T16:50:20.658,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22726.0>}, {name, {new_child_id, [960,961,962,963,964,966,968,969,972,973,974, 975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [960,961,962,963,964,966,968,969,972,973, 974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:20.664,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.666,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[960,961,962,963,964,966,968,969,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:20.666,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22727.0> [ns_server:debug,2014-08-19T16:50:20.672,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.673,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8671 us 
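ns_config_rep logs a round-trip time for every full synchronization request from 'ns_1@10.242.238.88'; in this stretch the reported durations range from 1476 to 8671 us. A short sketch for collecting and summarizing them (hypothetical helper names, standard library only):

```python
import re
from statistics import mean

# Hedged sketch: collect the "Fully synchronized config in N us" durations that
# ns_config_rep reports after each full synchronization request.
SYNC_US = re.compile(r"Fully synchronized config in (\d+) us")

def config_sync_times_us(log_blob: str):
    """All reported synchronization durations, in microseconds."""
    return [int(us) for us in SYNC_US.findall(log_blob)]

def summarize(log_blob: str) -> str:
    times = config_sync_times_us(log_blob)
    if not times:
        return "no config synchronizations found"
    return f"{len(times)} syncs, mean {mean(times):.0f} us, max {max(times)} us"
```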
[ns_server:debug,2014-08-19T16:50:20.673,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.673,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{969, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.695,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.698,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.698,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3294 us [ns_server:debug,2014-08-19T16:50:20.699,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.699,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{458, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.701,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 970 state to replica [ns_server:info,2014-08-19T16:50:20.702,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [960,961,962,963,964,966,968,969,970,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023] ([970], []) [ns_server:debug,2014-08-19T16:50:20.703,ns_1@10.242.238.90:<0.22729.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [960,961,962,963,964,966,968,969,970,972,973, 974,975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.253076>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[960,961,962,963,964,966,968,969,970,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987,988,989,990,991,992, 
993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:20.703,ns_1@10.242.238.90:<0.22729.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22726.0> [ns_server:info,2014-08-19T16:50:20.703,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:20.718,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {966,1}, {968,1}, {969,1}, {970,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:20.719,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:20.720,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:20.720,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:20.720,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:20.720,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:20.720,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.720,ns_1@10.242.238.90:<0.22732.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.720,ns_1@10.242.238.90:<0.22732.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.720,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:20.720,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:20.720,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:20.721,ns_1@10.242.238.90:<0.22726.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:20.721,ns_1@10.242.238.90:<0.22729.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22726.0> [ns_server:debug,2014-08-19T16:50:20.721,ns_1@10.242.238.90:<0.22729.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:20.721,ns_1@10.242.238.90:<0.22734.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:20.721,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22726.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22727.0>,<<"cut off">>,<<"cut off">>,[],184,false,false,0, {1408,452620,720005}, completed, {<0.22729.0>,#Ref<0.0.0.253089>}, <<"replication_ns_1@10.242.238.90">>,<0.22726.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:20.722,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22729.0>,{#Ref<0.0.0.253078>,<0.22734.0>}} [error_logger:info,2014-08-19T16:50:20.721,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22734.0>}, {name, {new_child_id, [960,961,962,963,964,966,968,969,970,972,973, 974,975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [960,961,962,963,964,966,968,969,970,972, 973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:20.727,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.730,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2892 us [ns_server:debug,2014-08-19T16:50:20.730,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.730,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[960,961,962,963,964,966,968,969,970,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:20.730,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22735.0> 
[ns_server:debug,2014-08-19T16:50:20.730,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.731,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{970, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.750,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.754,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.755,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{456, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.754,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3841 us [ns_server:debug,2014-08-19T16:50:20.757,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:20.760,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 967 state to replica [ns_server:info,2014-08-19T16:50:20.761,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [960,961,962,963,964,966,967,968,969,970,972,973,974,975,976,977,978,979,980, 981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023] ([967], []) [ns_server:debug,2014-08-19T16:50:20.762,ns_1@10.242.238.90:<0.22737.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [960,961,962,963,964,966,967,968,969,970,972, 973,974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.253242>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[960,961,962,963,964,966,967,968,969,970,972,973,974,975,976, 
977,978,979,980,981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:20.762,ns_1@10.242.238.90:<0.22737.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22734.0> [ns_server:info,2014-08-19T16:50:20.762,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:20.775,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:20.776,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:20.776,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:20.776,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:20.776,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:20.777,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:20.777,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.777,ns_1@10.242.238.90:<0.22739.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.777,ns_1@10.242.238.90:<0.22739.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.777,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:20.777,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:20.777,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:20.777,ns_1@10.242.238.90:<0.22734.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:20.777,ns_1@10.242.238.90:<0.22737.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22734.0> [ns_server:debug,2014-08-19T16:50:20.778,ns_1@10.242.238.90:<0.22737.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:20.778,ns_1@10.242.238.90:<0.22741.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:20.778,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22734.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22735.0>,<<"cut off">>,<<"cut off">>,[],187,false,false,0, {1408,452620,776713}, completed, {<0.22737.0>,#Ref<0.0.0.253255>}, <<"replication_ns_1@10.242.238.90">>,<0.22734.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:20.778,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22737.0>,{#Ref<0.0.0.253244>,<0.22741.0>}} [error_logger:info,2014-08-19T16:50:20.778,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22741.0>}, {name, {new_child_id, [960,961,962,963,964,966,967,968,969,970,972, 973,974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [960,961,962,963,964,966,967,968,969,970, 972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:20.782,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.787,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4162 us [ns_server:debug,2014-08-19T16:50:20.787,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.788,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.788,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[960,961,962,963,964,966,967,968,969,970,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] 
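Each "Going to change replication from 'ns_1@10.242.238.91' to have [...] ([967], [])" entry prints the new filter plus its delta against the previous one: vbuckets added and vbuckets dropped. A small sketch of computing that pair from two plain lists (not the tap_replication_manager source):

-module(vb_diff_sketch).
-export([diff/2, demo/0]).

%% diff(OldVBuckets, NewVBuckets) -> {Added, Removed}
diff(Old, New) ->
    Added   = [V || V <- New, not lists:member(V, Old)],
    Removed = [V || V <- Old, not lists:member(V, New)],
    {Added, Removed}.

demo() ->
    Old = [960,961,962,963,964,966,968,969,970],
    New = [960,961,962,963,964,966,967,968,969,970],
    {[967], []} = diff(Old, New),   %% same shape as "([967], [])" in the log
    ok.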
[rebalance:debug,2014-08-19T16:50:20.788,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22743.0> [ns_server:debug,2014-08-19T16:50:20.788,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{967, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.814,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.821,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.825,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 12 us [ns_server:debug,2014-08-19T16:50:20.826,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{712, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.826,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:20.829,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 965 state to replica [ns_server:info,2014-08-19T16:50:20.829,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [960,961,962,963,964,965,966,967,968,969,970,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([965], []) [ns_server:debug,2014-08-19T16:50:20.830,ns_1@10.242.238.90:<0.22745.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [960,961,962,963,964,965,966,967,968,969,970, 972,973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.253410>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[960,961,962,963,964,965,966,967,968,969,970,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986,987,988,989,990, 
991,992,993,994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:20.830,ns_1@10.242.238.90:<0.22745.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22741.0> [ns_server:info,2014-08-19T16:50:20.831,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:20.843,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:20.844,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:20.844,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:20.845,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:20.845,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:20.845,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:20.845,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.845,ns_1@10.242.238.90:<0.22747.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.845,ns_1@10.242.238.90:<0.22747.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.845,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:20.845,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:20.845,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:20.845,ns_1@10.242.238.90:<0.22741.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:20.846,ns_1@10.242.238.90:<0.22745.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22741.0> [ns_server:debug,2014-08-19T16:50:20.846,ns_1@10.242.238.90:<0.22745.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:20.846,ns_1@10.242.238.90:<0.22749.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:20.846,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22741.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22743.0>,<<"cut off">>,<<"cut off">>,[],190,false,false,0, {1408,452620,844961}, completed, {<0.22745.0>,#Ref<0.0.0.253423>}, <<"replication_ns_1@10.242.238.90">>,<0.22741.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:20.846,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22745.0>,{#Ref<0.0.0.253412>,<0.22749.0>}} [error_logger:info,2014-08-19T16:50:20.846,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22749.0>}, {name, {new_child_id, [960,961,962,963,964,965,966,967,968,969,970, 972,973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [960,961,962,963,964,965,966,967,968,969, 970,972,973,974,975,976,977,978,979,980, 981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:20.850,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.855,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[960,961,962,963,964,965,966,967,968,969,970,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:20.858,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22750.0> [ns_server:debug,2014-08-19T16:50:20.859,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.859,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8264 us 
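The "Got full synchronization request from 'ns_1@10.242.238.88'" / "Fully synchronized config in 8264 us" pairs are a timed synchronous call. One way such a microsecond figure can be produced is sketched below with timer:tc/1; this is illustrative only, not the ns_config_rep implementation.

-module(sync_timing_sketch).
-export([timed_full_sync/1]).

%% Run the synchronization fun and log how long it took, in microseconds.
timed_full_sync(SyncFun) when is_function(SyncFun, 0) ->
    {Micros, Result} = timer:tc(SyncFun),
    error_logger:info_msg("Fully synchronized config in ~p us~n", [Micros]),
    Result.

%% Example: timed_full_sync(fun () -> ok end).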
[ns_server:debug,2014-08-19T16:50:20.859,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.860,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{965, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.879,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.886,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6123 us [ns_server:debug,2014-08-19T16:50:20.886,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.887,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.887,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{711, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.889,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:debug,2014-08-19T16:50:20.890,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:compact_next_bucket:1453]Going to spawn bucket compaction with forced view compaction for bucket default [ns_server:debug,2014-08-19T16:50:20.890,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:compact_next_bucket:1482]Spawned 'uninhibited' compaction for default [ns_server:info,2014-08-19T16:50:20.892,ns_1@10.242.238.90:<0.22752.0>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning up indexes for bucket `default` [ns_server:info,2014-08-19T16:50:20.893,ns_1@10.242.238.90:<0.22752.0>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [forced_previously_inhibited_view_compaction, {database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:50:20.899,ns_1@10.242.238.90:<0.22756.0>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 17792, disk size is 2103040 [ns_server:debug,2014-08-19T16:50:20.899,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. 
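The compaction_daemon entries show the inputs it works with here: for bucket `default`, 17792 bytes of data versus 2103040 bytes on disk, checked against {database_fragmentation_threshold,{30,undefined}}. The sketch below shows the kind of percentage check those numbers feed; the real bucket_needs_compaction logic has additional inputs (for example minimum file sizes), so treat it as illustrative only.

-module(frag_sketch).
-export([fragmentation_percent/2, over_threshold/3, demo/0]).

%% Share of the on-disk file that is not live data, as an integer percentage.
fragmentation_percent(DataSize, DiskSize) when DiskSize > 0 ->
    (DiskSize - DataSize) * 100 div DiskSize.

over_threshold(DataSize, DiskSize, ThresholdPercent) ->
    fragmentation_percent(DataSize, DiskSize) >= ThresholdPercent.

demo() ->
    %% The numbers from the log: 17792 live bytes in a 2103040-byte file.
    99 = fragmentation_percent(17792, 2103040),
    true = over_threshold(17792, 2103040, 30),
    ok.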
[ns_server:debug,2014-08-19T16:50:20.899,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:50:20.903,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.907,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3288 us [ns_server:debug,2014-08-19T16:50:20.907,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.907,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.908,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{449, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.909,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 971 state to replica [ns_server:info,2014-08-19T16:50:20.909,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([971], []) [ns_server:debug,2014-08-19T16:50:20.910,ns_1@10.242.238.90:<0.22757.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.0.253744>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[960,961,962,963,964,965,966,967,968,969,970,971,972,973,974, 975,976,977,978,979,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:20.911,ns_1@10.242.238.90:<0.22757.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22749.0> [ns_server:info,2014-08-19T16:50:20.911,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream 
`replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:20.924,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:20.925,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:20.925,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:20.925,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:20.925,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:20.925,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:20.926,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:20.926,ns_1@10.242.238.90:<0.22760.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:20.926,ns_1@10.242.238.90:<0.22760.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:20.926,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:20.926,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:20.926,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:20.926,ns_1@10.242.238.90:<0.22749.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:20.926,ns_1@10.242.238.90:<0.22757.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22749.0> [ns_server:debug,2014-08-19T16:50:20.927,ns_1@10.242.238.90:<0.22757.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:20.927,ns_1@10.242.238.90:<0.22762.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:20.927,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22749.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22750.0>,<<"cut off">>,<<"cut off">>,[],193,false,false,0, {1408,452620,925611}, completed, {<0.22757.0>,#Ref<0.0.0.253757>}, <<"replication_ns_1@10.242.238.90">>,<0.22749.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:20.927,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.22757.0>,{#Ref<0.0.0.253746>,<0.22762.0>}} [error_logger:info,2014-08-19T16:50:20.927,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.22762.0>}, {name, {new_child_id, [960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:20.933,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:20.936,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:20.936,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22763.0> [ns_server:debug,2014-08-19T16:50:20.937,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.937,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3385 us [ns_server:debug,2014-08-19T16:50:20.938,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.938,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{971, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, 
{num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.951,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 703 state to replica [ns_server:info,2014-08-19T16:50:20.960,ns_1@10.242.238.90:<0.22765.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 703 to state replica [ns_server:debug,2014-08-19T16:50:21.008,ns_1@10.242.238.90:<0.22765.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_703_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:21.010,ns_1@10.242.238.90:<0.22765.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[703]}, {checkpoints,[{703,0}]}, {name,<<"replication_building_703_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[703]}, {takeover,false}, {suffix,"building_703_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",703,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:21.011,ns_1@10.242.238.90:<0.22765.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22766.0> [rebalance:debug,2014-08-19T16:50:21.011,ns_1@10.242.238.90:<0.22765.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:21.012,ns_1@10.242.238.90:<0.22765.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.5575.1>,#Ref<16550.0.1.121904>}]} [rebalance:info,2014-08-19T16:50:21.012,ns_1@10.242.238.90:<0.22765.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 703 [rebalance:debug,2014-08-19T16:50:21.012,ns_1@10.242.238.90:<0.22765.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.5575.1>,#Ref<16550.0.1.121904>}] [ns_server:debug,2014-08-19T16:50:21.013,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22767.0> (ok) [ns_server:debug,2014-08-19T16:50:21.013,ns_1@10.242.238.90:<0.22765.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:21.015,ns_1@10.242.238.90:<0.22768.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 703 [ns_server:info,2014-08-19T16:50:21.020,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 959 state to replica [ns_server:info,2014-08-19T16:50:21.024,ns_1@10.242.238.90:<0.22771.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 959 to state replica [ns_server:debug,2014-08-19T16:50:21.055,ns_1@10.242.238.90:<0.22771.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_959_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:21.056,ns_1@10.242.238.90:<0.22771.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[959]}, {checkpoints,[{959,0}]}, {name,<<"replication_building_959_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[959]}, {takeover,false}, {suffix,"building_959_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",959,'ns_1@10.242.238.88', 
'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:21.057,ns_1@10.242.238.90:<0.22771.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22786.0> [rebalance:debug,2014-08-19T16:50:21.057,ns_1@10.242.238.90:<0.22771.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:21.057,ns_1@10.242.238.90:<0.22771.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.5610.1>,#Ref<16550.0.1.122069>}]} [rebalance:info,2014-08-19T16:50:21.057,ns_1@10.242.238.90:<0.22771.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 959 [rebalance:debug,2014-08-19T16:50:21.058,ns_1@10.242.238.90:<0.22771.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.5610.1>,#Ref<16550.0.1.122069>}] [ns_server:debug,2014-08-19T16:50:21.058,ns_1@10.242.238.90:<0.22771.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:21.073,ns_1@10.242.238.90:<0.22787.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 959 [ns_server:debug,2014-08-19T16:50:21.106,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 703. Nacking mccouch update. [views:debug,2014-08-19T16:50:21.106,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/703. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:21.107,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",703,replica,0} [ns_server:debug,2014-08-19T16:50:21.107,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,705,1016,971,756,724,1003,990,743,711,1022,977,762, 730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,742,710,1021, 976,761,729,1008,995,979,963,764,748,732,716,1011,998,982,966,767,751,735, 719,703,1014,985,969,754,738,722,706,1017,1001,988,972,757,741,725,709,1020, 1004,991,975,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010,981, 766,734,1013,968,753,721,1000,987,740,708,1019,974,759,727,1006,993,961,746, 714,980,765,733,1012,999,967,752,720,986,739,707,1018,973,758,726,1005,992, 960,745,713] [ns_server:info,2014-08-19T16:50:21.142,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 958 state to replica [ns_server:info,2014-08-19T16:50:21.146,ns_1@10.242.238.90:<0.22790.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 958 to state replica [ns_server:debug,2014-08-19T16:50:21.175,ns_1@10.242.238.90:<0.22790.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_958_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:21.177,ns_1@10.242.238.90:<0.22790.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[958]}, {checkpoints,[{958,0}]}, {name,<<"replication_building_958_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[958]}, {takeover,false}, {suffix,"building_958_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",958,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} 
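The "Starting tap stream" entries for replica building all follow one shape: a single vbucket, a checkpoint of 0 for it, a stream name of the form replication_building_<VBucket>_'<DstNode>', and takeover false. A sketch that assembles that option list (shape copied from the log; this is not the ebucketmigrator_srv API):

-module(tap_opts_sketch).
-export([replica_building_opts/2, demo/0]).

replica_building_opts(VBucket, DstNode) ->
    Suffix = lists:flatten(io_lib:format("building_~B_'~s'", [VBucket, DstNode])),
    [{vbuckets, [VBucket]},
     {checkpoints, [{VBucket, 0}]},
     {name, list_to_binary("replication_" ++ Suffix)},
     {takeover, false},
     {suffix, Suffix}].

demo() ->
    Opts = replica_building_opts(959, 'ns_1@10.242.238.90'),
    <<"replication_building_959_'ns_1@10.242.238.90'">> =
        proplists:get_value(name, Opts),
    ok.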
[rebalance:debug,2014-08-19T16:50:21.178,ns_1@10.242.238.90:<0.22790.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22791.0> [rebalance:debug,2014-08-19T16:50:21.178,ns_1@10.242.238.90:<0.22790.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:21.178,ns_1@10.242.238.90:<0.22790.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.5658.1>,#Ref<16550.0.1.122337>}]} [rebalance:info,2014-08-19T16:50:21.179,ns_1@10.242.238.90:<0.22790.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 958 [rebalance:debug,2014-08-19T16:50:21.179,ns_1@10.242.238.90:<0.22790.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.5658.1>,#Ref<16550.0.1.122337>}] [ns_server:debug,2014-08-19T16:50:21.180,ns_1@10.242.238.90:<0.22790.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:50:21.190,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/703. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:21.190,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",703,replica,0} [rebalance:debug,2014-08-19T16:50:21.195,ns_1@10.242.238.90:<0.22792.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 958 [ns_server:info,2014-08-19T16:50:21.201,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 702 state to replica [ns_server:info,2014-08-19T16:50:21.207,ns_1@10.242.238.90:<0.22795.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 702 to state replica [ns_server:debug,2014-08-19T16:50:21.248,ns_1@10.242.238.90:<0.22795.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_702_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:21.249,ns_1@10.242.238.90:<0.22795.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[702]}, {checkpoints,[{702,0}]}, {name,<<"replication_building_702_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[702]}, {takeover,false}, {suffix,"building_702_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",702,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:21.250,ns_1@10.242.238.90:<0.22795.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22798.0> [rebalance:debug,2014-08-19T16:50:21.250,ns_1@10.242.238.90:<0.22795.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:21.250,ns_1@10.242.238.90:<0.22795.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.5678.1>,#Ref<16550.0.1.122432>}]} [rebalance:info,2014-08-19T16:50:21.251,ns_1@10.242.238.90:<0.22795.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 702 [rebalance:debug,2014-08-19T16:50:21.251,ns_1@10.242.238.90:<0.22795.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.5678.1>,#Ref<16550.0.1.122432>}] [ns_server:debug,2014-08-19T16:50:21.252,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22805.0> (ok) 
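The "Suspended had_backfill waiter {had_backfill,undefined,undefined,[{Pid,Ref}]}" entries followed by "Replied had_backfill: true to [{Pid,Ref}]" are a classic deferred gen_server reply: when the call arrives the answer is not yet known, so the caller is parked and answered only once the upstream reveals whether a backfill started. A minimal sketch of that pattern (illustrative; not the ebucketmigrator_srv source):

-module(backfill_wait_sketch).
-behaviour(gen_server).
-export([start_link/0, had_backfill/1, note_backfill/2]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
         terminate/2, code_change/3]).

start_link() -> gen_server:start_link(?MODULE, [], []).

had_backfill(Pid) -> gen_server:call(Pid, had_backfill, infinity).
note_backfill(Pid, Bool) -> gen_server:cast(Pid, {note_backfill, Bool}).

init([]) -> {ok, #{answer => undefined, waiters => []}}.

handle_call(had_backfill, From, #{answer := undefined, waiters := Ws} = S) ->
    %% "Suspended had_backfill waiter": no answer yet, park the caller.
    {noreply, S#{waiters := [From | Ws]}};
handle_call(had_backfill, _From, #{answer := A} = S) ->
    {reply, A, S}.

handle_cast({note_backfill, Bool}, #{waiters := Ws} = S) ->
    %% "Replied had_backfill: true to [...]"
    lists:foreach(fun (W) -> gen_server:reply(W, Bool) end, Ws),
    {noreply, S#{answer := Bool, waiters := []}}.

handle_info(_Msg, S) -> {noreply, S}.
terminate(_Reason, _S) -> ok.
code_change(_Old, S, _Extra) -> {ok, S}.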
[ns_server:debug,2014-08-19T16:50:21.252,ns_1@10.242.238.90:<0.22795.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:21.253,ns_1@10.242.238.90:<0.22812.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 702 [ns_server:info,2014-08-19T16:50:21.320,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 957 state to replica [ns_server:info,2014-08-19T16:50:21.323,ns_1@10.242.238.90:<0.22815.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 957 to state replica [ns_server:debug,2014-08-19T16:50:21.333,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 959. Nacking mccouch update. [views:debug,2014-08-19T16:50:21.333,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/959. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:21.333,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",959,replica,0} [ns_server:debug,2014-08-19T16:50:21.334,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,705,1016,971,756,724,1003,990,743,711,1022,977,762, 730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,742,710,1021, 976,761,729,1008,995,979,963,764,748,732,716,1011,998,982,966,767,751,735, 719,703,1014,985,969,754,738,722,706,1017,1001,988,972,757,741,725,709,1020, 1004,991,975,959,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010, 981,766,734,1013,968,753,721,1000,987,740,708,1019,974,759,727,1006,993,961, 746,714,980,765,733,1012,999,967,752,720,986,739,707,1018,973,758,726,1005, 992,960,745,713] [ns_server:debug,2014-08-19T16:50:21.354,ns_1@10.242.238.90:<0.22815.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_957_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:21.356,ns_1@10.242.238.90:<0.22815.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[957]}, {checkpoints,[{957,0}]}, {name,<<"replication_building_957_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[957]}, {takeover,false}, {suffix,"building_957_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",957,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:21.357,ns_1@10.242.238.90:<0.22815.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22816.0> [rebalance:debug,2014-08-19T16:50:21.357,ns_1@10.242.238.90:<0.22815.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:21.357,ns_1@10.242.238.90:<0.22815.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.5743.1>,#Ref<16550.0.1.122731>}]} [rebalance:info,2014-08-19T16:50:21.358,ns_1@10.242.238.90:<0.22815.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 957 [rebalance:debug,2014-08-19T16:50:21.358,ns_1@10.242.238.90:<0.22815.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.5743.1>,#Ref<16550.0.1.122731>}] [ns_server:debug,2014-08-19T16:50:21.359,ns_1@10.242.238.90:<0.22815.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close 
message [rebalance:debug,2014-08-19T16:50:21.372,ns_1@10.242.238.90:<0.22823.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 957 [ns_server:info,2014-08-19T16:50:21.377,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 701 state to replica [ns_server:info,2014-08-19T16:50:21.383,ns_1@10.242.238.90:<0.22826.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 701 to state replica [views:debug,2014-08-19T16:50:21.417,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/959. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:21.417,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",959,replica,0} [ns_server:debug,2014-08-19T16:50:21.424,ns_1@10.242.238.90:<0.22826.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_701_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:21.426,ns_1@10.242.238.90:<0.22826.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[701]}, {checkpoints,[{701,0}]}, {name,<<"replication_building_701_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[701]}, {takeover,false}, {suffix,"building_701_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",701,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:21.426,ns_1@10.242.238.90:<0.22826.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22827.0> [rebalance:debug,2014-08-19T16:50:21.426,ns_1@10.242.238.90:<0.22826.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:21.427,ns_1@10.242.238.90:<0.22826.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.5765.1>,#Ref<16550.0.1.122862>}]} [rebalance:info,2014-08-19T16:50:21.427,ns_1@10.242.238.90:<0.22826.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 701 [rebalance:debug,2014-08-19T16:50:21.427,ns_1@10.242.238.90:<0.22826.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.5765.1>,#Ref<16550.0.1.122862>}] [ns_server:debug,2014-08-19T16:50:21.428,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22828.0> (ok) [ns_server:debug,2014-08-19T16:50:21.428,ns_1@10.242.238.90:<0.22826.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:21.430,ns_1@10.242.238.90:<0.22830.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 701 [views:debug,2014-08-19T16:50:21.484,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/703. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:21.484,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",703,pending,0} [ns_server:info,2014-08-19T16:50:21.495,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 956 state to replica [ns_server:info,2014-08-19T16:50:21.498,ns_1@10.242.238.90:<0.22833.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 956 to state replica [ns_server:debug,2014-08-19T16:50:21.527,ns_1@10.242.238.90:<0.22833.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_956_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:21.528,ns_1@10.242.238.90:<0.22833.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[956]}, {checkpoints,[{956,0}]}, {name,<<"replication_building_956_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[956]}, {takeover,false}, {suffix,"building_956_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",956,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:21.529,ns_1@10.242.238.90:<0.22833.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22834.0> [rebalance:debug,2014-08-19T16:50:21.529,ns_1@10.242.238.90:<0.22833.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:21.530,ns_1@10.242.238.90:<0.22833.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.5823.1>,#Ref<16550.0.1.123111>}]} [rebalance:info,2014-08-19T16:50:21.530,ns_1@10.242.238.90:<0.22833.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 956 [rebalance:debug,2014-08-19T16:50:21.531,ns_1@10.242.238.90:<0.22833.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.5823.1>,#Ref<16550.0.1.123111>}] [ns_server:debug,2014-08-19T16:50:21.531,ns_1@10.242.238.90:<0.22833.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:21.547,ns_1@10.242.238.90:<0.22849.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 956 [ns_server:info,2014-08-19T16:50:21.552,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 700 state to replica [ns_server:info,2014-08-19T16:50:21.559,ns_1@10.242.238.90:<0.22852.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 700 to state replica [ns_server:debug,2014-08-19T16:50:21.567,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 958. Nacking mccouch update. [views:debug,2014-08-19T16:50:21.567,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/958. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:21.567,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",958,replica,0} [ns_server:debug,2014-08-19T16:50:21.568,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,705,1016,971,756,724,1003,990,958,743,711,1022,977, 762,730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,742,710, 1021,976,761,729,1008,995,963,748,716,998,982,966,767,751,735,719,703,1014, 985,969,754,738,722,706,1017,1001,988,972,757,741,725,709,1020,1004,991,975, 959,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010,981,766,734, 1013,968,753,721,1000,987,740,708,1019,974,759,727,1006,993,961,746,714,980, 765,733,1012,999,967,752,720,986,739,707,1018,973,758,726,1005,992,960,745, 713,979,764,732,1011] [views:debug,2014-08-19T16:50:21.601,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/958. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:21.602,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",958,replica,0} [ns_server:debug,2014-08-19T16:50:21.602,ns_1@10.242.238.90:<0.22852.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_700_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:21.603,ns_1@10.242.238.90:<0.22852.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[700]}, {checkpoints,[{700,0}]}, {name,<<"replication_building_700_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[700]}, {takeover,false}, {suffix,"building_700_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",700,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:21.604,ns_1@10.242.238.90:<0.22852.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22853.0> [rebalance:debug,2014-08-19T16:50:21.604,ns_1@10.242.238.90:<0.22852.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:21.604,ns_1@10.242.238.90:<0.22852.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.5843.1>,#Ref<16550.0.1.123227>}]} [rebalance:info,2014-08-19T16:50:21.605,ns_1@10.242.238.90:<0.22852.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 700 [rebalance:debug,2014-08-19T16:50:21.605,ns_1@10.242.238.90:<0.22852.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.5843.1>,#Ref<16550.0.1.123227>}] [ns_server:debug,2014-08-19T16:50:21.606,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22854.0> (ok) [ns_server:debug,2014-08-19T16:50:21.606,ns_1@10.242.238.90:<0.22852.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:21.607,ns_1@10.242.238.90:<0.22855.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 700 [ns_server:info,2014-08-19T16:50:21.673,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 955 state to replica 
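Each time mc_couch_events reports a set_vbucket change, capi_set_view_manager re-logs its "Usable vbuckets" set with the affected vbucket folded in (958 joins the list right after its event above). A sketch of that bookkeeping, under the assumption, made only for illustration, that active, replica and pending vbuckets all count as usable:

-module(usable_vbuckets_sketch).
-export([on_set_vbucket/3]).

%% Fold a {set_vbucket, VBucket, State} event into the usable set.
on_set_vbucket(VBucket, State, Usable) when State =:= active;
                                            State =:= replica;
                                            State =:= pending ->
    ordsets:add_element(VBucket, Usable);
on_set_vbucket(VBucket, _OtherState, Usable) ->
    ordsets:del_element(VBucket, Usable).

%% Example:
%%   true = ordsets:is_element(958, on_set_vbucket(958, replica, ordsets:new())).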
[ns_server:info,2014-08-19T16:50:21.677,ns_1@10.242.238.90:<0.22872.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 955 to state replica [ns_server:debug,2014-08-19T16:50:21.684,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 702. Nacking mccouch update. [views:debug,2014-08-19T16:50:21.685,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/702. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:21.685,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",702,pending,0} [ns_server:debug,2014-08-19T16:50:21.685,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,705,1016,971,756,724,1003,990,958,743,711,1022,977, 762,730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,742,710, 1021,976,761,729,1008,995,963,748,716,998,982,966,767,751,735,719,703,1014, 985,969,754,738,722,706,1017,1001,988,972,757,741,725,709,1020,1004,991,975, 959,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010,981,766,734, 702,1013,968,753,721,1000,987,740,708,1019,974,759,727,1006,993,961,746,714, 980,765,733,1012,999,967,752,720,986,739,707,1018,973,758,726,1005,992,960, 745,713,979,764,732,1011] [ns_server:debug,2014-08-19T16:50:21.706,ns_1@10.242.238.90:<0.22872.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_955_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:21.708,ns_1@10.242.238.90:<0.22872.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[955]}, {checkpoints,[{955,0}]}, {name,<<"replication_building_955_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[955]}, {takeover,false}, {suffix,"building_955_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",955,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:21.709,ns_1@10.242.238.90:<0.22872.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22873.0> [rebalance:debug,2014-08-19T16:50:21.709,ns_1@10.242.238.90:<0.22872.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:21.710,ns_1@10.242.238.90:<0.22872.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.5900.1>,#Ref<16550.0.1.123520>}]} [rebalance:info,2014-08-19T16:50:21.710,ns_1@10.242.238.90:<0.22872.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 955 [rebalance:debug,2014-08-19T16:50:21.710,ns_1@10.242.238.90:<0.22872.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.5900.1>,#Ref<16550.0.1.123520>}] [ns_server:debug,2014-08-19T16:50:21.711,ns_1@10.242.238.90:<0.22872.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:50:21.718,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/702. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:21.719,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",702,pending,0} [rebalance:debug,2014-08-19T16:50:21.720,ns_1@10.242.238.90:<0.22855.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:21.720,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22855.0> (ok) [rebalance:debug,2014-08-19T16:50:21.720,ns_1@10.242.238.90:<0.22830.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:21.720,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22830.0> (ok) [rebalance:debug,2014-08-19T16:50:21.726,ns_1@10.242.238.90:<0.22874.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 955 [ns_server:info,2014-08-19T16:50:21.731,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 699 state to replica [ns_server:info,2014-08-19T16:50:21.737,ns_1@10.242.238.90:<0.22877.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 699 to state replica [ns_server:debug,2014-08-19T16:50:21.779,ns_1@10.242.238.90:<0.22877.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_699_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:21.781,ns_1@10.242.238.90:<0.22877.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[699]}, {checkpoints,[{699,0}]}, {name,<<"replication_building_699_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[699]}, {takeover,false}, {suffix,"building_699_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",699,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:21.781,ns_1@10.242.238.90:<0.22877.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22892.0> [rebalance:debug,2014-08-19T16:50:21.782,ns_1@10.242.238.90:<0.22877.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:21.782,ns_1@10.242.238.90:<0.22877.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.5920.1>,#Ref<16550.0.1.123636>}]} [rebalance:info,2014-08-19T16:50:21.782,ns_1@10.242.238.90:<0.22877.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 699 [rebalance:debug,2014-08-19T16:50:21.782,ns_1@10.242.238.90:<0.22877.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.5920.1>,#Ref<16550.0.1.123636>}] [ns_server:debug,2014-08-19T16:50:21.783,ns_1@10.242.238.90:<0.22877.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:21.783,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22893.0> (ok) [rebalance:debug,2014-08-19T16:50:21.785,ns_1@10.242.238.90:<0.22894.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 699 [ns_server:debug,2014-08-19T16:50:21.785,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 957. Nacking mccouch update. 
[views:debug,2014-08-19T16:50:21.785,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/957. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:21.785,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",957,replica,0} [ns_server:debug,2014-08-19T16:50:21.786,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,705,1016,971,756,724,1003,990,958,743,711,1022,977, 762,730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,957,742, 710,1021,976,761,729,1008,995,963,748,716,998,982,966,767,751,735,719,703, 1014,985,969,754,738,722,706,1017,1001,988,972,757,741,725,709,1020,1004,991, 975,959,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010,981,766, 734,702,1013,968,753,721,1000,987,740,708,1019,974,759,727,1006,993,961,746, 714,980,765,733,1012,999,967,752,720,986,739,707,1018,973,758,726,1005,992, 960,745,713,979,764,732,1011] [views:debug,2014-08-19T16:50:21.819,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/957. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:21.819,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",957,replica,0} [ns_server:info,2014-08-19T16:50:21.850,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 954 state to replica [ns_server:info,2014-08-19T16:50:21.854,ns_1@10.242.238.90:<0.22897.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 954 to state replica [ns_server:debug,2014-08-19T16:50:21.884,ns_1@10.242.238.90:<0.22897.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_954_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:21.885,ns_1@10.242.238.90:<0.22897.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[954]}, {checkpoints,[{954,0}]}, {name,<<"replication_building_954_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[954]}, {takeover,false}, {suffix,"building_954_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",954,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:21.885,ns_1@10.242.238.90:<0.22897.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22912.0> [rebalance:debug,2014-08-19T16:50:21.886,ns_1@10.242.238.90:<0.22897.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [ns_server:debug,2014-08-19T16:50:21.886,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 955. Nacking mccouch update. [views:debug,2014-08-19T16:50:21.886,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/955. 
Updated state: replica (0) [rebalance:debug,2014-08-19T16:50:21.886,ns_1@10.242.238.90:<0.22897.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.5977.1>,#Ref<16550.0.1.123899>}]} [ns_server:debug,2014-08-19T16:50:21.886,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",955,replica,0} [rebalance:info,2014-08-19T16:50:21.886,ns_1@10.242.238.90:<0.22897.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 954 [rebalance:debug,2014-08-19T16:50:21.887,ns_1@10.242.238.90:<0.22897.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.5977.1>,#Ref<16550.0.1.123899>}] [ns_server:debug,2014-08-19T16:50:21.887,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,705,1016,971,756,724,1003,990,958,743,711,1022,977, 762,730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,957,742, 710,1021,976,761,729,1008,995,963,748,716,998,982,966,767,751,735,719,703, 1014,985,969,754,738,722,706,1017,1001,988,972,757,741,725,709,1020,1004,991, 975,959,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010,981,766, 734,702,1013,968,753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961, 746,714,980,765,733,1012,999,967,752,720,986,739,707,1018,973,758,726,1005, 992,960,745,713,979,764,732,1011] [ns_server:debug,2014-08-19T16:50:21.888,ns_1@10.242.238.90:<0.22897.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:21.904,ns_1@10.242.238.90:<0.22913.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 954 [ns_server:info,2014-08-19T16:50:21.909,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 698 state to replica [ns_server:info,2014-08-19T16:50:21.915,ns_1@10.242.238.90:<0.22916.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 698 to state replica [views:debug,2014-08-19T16:50:21.920,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/955. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:21.920,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",955,replica,0} [ns_server:debug,2014-08-19T16:50:21.957,ns_1@10.242.238.90:<0.22916.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_698_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:21.958,ns_1@10.242.238.90:<0.22916.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[698]}, {checkpoints,[{698,0}]}, {name,<<"replication_building_698_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[698]}, {takeover,false}, {suffix,"building_698_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",698,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:21.959,ns_1@10.242.238.90:<0.22916.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22931.0> [rebalance:debug,2014-08-19T16:50:21.959,ns_1@10.242.238.90:<0.22916.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:21.960,ns_1@10.242.238.90:<0.22916.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.5997.1>,#Ref<16550.0.1.124016>}]} [rebalance:info,2014-08-19T16:50:21.960,ns_1@10.242.238.90:<0.22916.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 698 [rebalance:debug,2014-08-19T16:50:21.960,ns_1@10.242.238.90:<0.22916.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.5997.1>,#Ref<16550.0.1.124016>}] [ns_server:debug,2014-08-19T16:50:21.960,ns_1@10.242.238.90:<0.22916.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:21.961,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22932.0> (ok) [rebalance:debug,2014-08-19T16:50:21.962,ns_1@10.242.238.90:<0.22933.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 698 [ns_server:debug,2014-08-19T16:50:22.000,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 701. Nacking mccouch update. [views:debug,2014-08-19T16:50:22.001,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/701. 
Updated state: pending (1) [ns_server:debug,2014-08-19T16:50:22.001,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",701,pending,1} [ns_server:debug,2014-08-19T16:50:22.001,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,705,1016,971,756,724,1003,990,958,743,711,1022,977, 762,730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,957,742, 710,1021,976,761,729,1008,995,963,748,716,998,982,966,767,751,735,719,703, 1014,985,969,754,738,722,706,1017,1001,988,972,757,741,725,709,1020,1004,991, 975,959,760,744,728,712,1023,1007,994,978,962,763,747,731,715,1010,981,766, 734,702,1013,968,753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961, 746,714,980,765,733,701,1012,999,967,752,720,986,739,707,1018,973,758,726, 1005,992,960,745,713,979,764,732,1011] [ns_server:info,2014-08-19T16:50:22.031,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 953 state to replica [ns_server:info,2014-08-19T16:50:22.035,ns_1@10.242.238.90:<0.22936.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 953 to state replica [ns_server:debug,2014-08-19T16:50:22.064,ns_1@10.242.238.90:<0.22936.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_953_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:22.066,ns_1@10.242.238.90:<0.22936.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[953]}, {checkpoints,[{953,0}]}, {name,<<"replication_building_953_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[953]}, {takeover,false}, {suffix,"building_953_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",953,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:22.067,ns_1@10.242.238.90:<0.22936.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22937.0> [rebalance:debug,2014-08-19T16:50:22.067,ns_1@10.242.238.90:<0.22936.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:22.067,ns_1@10.242.238.90:<0.22936.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6054.1>,#Ref<16550.0.1.124279>}]} [rebalance:info,2014-08-19T16:50:22.067,ns_1@10.242.238.90:<0.22936.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 953 [rebalance:debug,2014-08-19T16:50:22.068,ns_1@10.242.238.90:<0.22936.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6054.1>,#Ref<16550.0.1.124279>}] [ns_server:debug,2014-08-19T16:50:22.068,ns_1@10.242.238.90:<0.22936.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:50:22.085,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/701. 
Updated state: pending (1) [ns_server:debug,2014-08-19T16:50:22.085,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",701,pending,1} [rebalance:debug,2014-08-19T16:50:22.088,ns_1@10.242.238.90:<0.22938.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 953 [ns_server:info,2014-08-19T16:50:22.093,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 697 state to replica [ns_server:info,2014-08-19T16:50:22.100,ns_1@10.242.238.90:<0.22941.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 697 to state replica [ns_server:debug,2014-08-19T16:50:22.142,ns_1@10.242.238.90:<0.22941.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_697_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:22.143,ns_1@10.242.238.90:<0.22941.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[697]}, {checkpoints,[{697,0}]}, {name,<<"replication_building_697_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[697]}, {takeover,false}, {suffix,"building_697_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",697,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:22.144,ns_1@10.242.238.90:<0.22941.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22947.0> [rebalance:debug,2014-08-19T16:50:22.144,ns_1@10.242.238.90:<0.22941.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:22.145,ns_1@10.242.238.90:<0.22941.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6082.1>,#Ref<16550.0.1.124442>}]} [rebalance:info,2014-08-19T16:50:22.145,ns_1@10.242.238.90:<0.22941.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 697 [rebalance:debug,2014-08-19T16:50:22.145,ns_1@10.242.238.90:<0.22941.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6082.1>,#Ref<16550.0.1.124442>}] [ns_server:debug,2014-08-19T16:50:22.146,ns_1@10.242.238.90:<0.22941.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:22.146,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22953.0> (ok) [rebalance:debug,2014-08-19T16:50:22.147,ns_1@10.242.238.90:<0.22958.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 697 [ns_server:info,2014-08-19T16:50:22.211,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 952 state to replica [ns_server:info,2014-08-19T16:50:22.216,ns_1@10.242.238.90:<0.22961.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 952 to state replica [ns_server:debug,2014-08-19T16:50:22.226,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 953. Nacking mccouch update. [views:debug,2014-08-19T16:50:22.226,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/953. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:22.227,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",953,replica,0} [ns_server:debug,2014-08-19T16:50:22.227,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,705,1016,971,756,724,1003,990,958,743,711,1022,977, 762,730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,957,742, 710,1021,976,761,729,1008,995,963,748,716,982,767,735,703,1014,985,969,953, 754,738,722,706,1017,1001,988,972,757,741,725,709,1020,1004,991,975,959,760, 744,728,712,1023,1007,994,978,962,763,747,731,715,1010,981,766,734,702,1013, 968,753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714,980, 765,733,701,1012,999,967,752,720,986,739,707,1018,973,758,726,1005,992,960, 745,713,979,764,732,1011,998,966,751,719] [ns_server:debug,2014-08-19T16:50:22.244,ns_1@10.242.238.90:<0.22961.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_952_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:22.246,ns_1@10.242.238.90:<0.22961.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[952]}, {checkpoints,[{952,0}]}, {name,<<"replication_building_952_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[952]}, {takeover,false}, {suffix,"building_952_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",952,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:22.246,ns_1@10.242.238.90:<0.22961.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22962.0> [rebalance:debug,2014-08-19T16:50:22.247,ns_1@10.242.238.90:<0.22961.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:22.247,ns_1@10.242.238.90:<0.22961.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6145.1>,#Ref<16550.0.1.124745>}]} [rebalance:info,2014-08-19T16:50:22.247,ns_1@10.242.238.90:<0.22961.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 952 [rebalance:debug,2014-08-19T16:50:22.248,ns_1@10.242.238.90:<0.22961.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6145.1>,#Ref<16550.0.1.124745>}] [ns_server:debug,2014-08-19T16:50:22.248,ns_1@10.242.238.90:<0.22961.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:22.265,ns_1@10.242.238.90:<0.22963.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 952 [ns_server:info,2014-08-19T16:50:22.270,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 696 state to replica [ns_server:info,2014-08-19T16:50:22.277,ns_1@10.242.238.90:<0.22966.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 696 to state replica [views:debug,2014-08-19T16:50:22.311,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/953. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:22.311,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",953,replica,0} [ns_server:debug,2014-08-19T16:50:22.322,ns_1@10.242.238.90:<0.22966.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_696_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:22.323,ns_1@10.242.238.90:<0.22966.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[696]}, {checkpoints,[{696,0}]}, {name,<<"replication_building_696_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[696]}, {takeover,false}, {suffix,"building_696_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",696,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:22.324,ns_1@10.242.238.90:<0.22966.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22967.0> [rebalance:debug,2014-08-19T16:50:22.324,ns_1@10.242.238.90:<0.22966.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:22.325,ns_1@10.242.238.90:<0.22966.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6165.1>,#Ref<16550.0.1.124862>}]} [rebalance:info,2014-08-19T16:50:22.325,ns_1@10.242.238.90:<0.22966.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 696 [rebalance:debug,2014-08-19T16:50:22.325,ns_1@10.242.238.90:<0.22966.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6165.1>,#Ref<16550.0.1.124862>}] [ns_server:debug,2014-08-19T16:50:22.326,ns_1@10.242.238.90:<0.22966.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:22.326,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22968.0> (ok) [rebalance:debug,2014-08-19T16:50:22.327,ns_1@10.242.238.90:<0.22969.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 696 [ns_server:info,2014-08-19T16:50:22.392,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 951 state to replica [ns_server:info,2014-08-19T16:50:22.396,ns_1@10.242.238.90:<0.22986.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 951 to state replica [ns_server:debug,2014-08-19T16:50:22.424,ns_1@10.242.238.90:<0.22986.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_951_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:22.426,ns_1@10.242.238.90:<0.22986.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[951]}, {checkpoints,[{951,0}]}, {name,<<"replication_building_951_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[951]}, {takeover,false}, {suffix,"building_951_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",951,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:22.426,ns_1@10.242.238.90:<0.22986.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22987.0> [rebalance:debug,2014-08-19T16:50:22.426,ns_1@10.242.238.90:<0.22986.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:50:22.427,ns_1@10.242.238.90:<0.22986.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6222.1>,#Ref<16550.0.1.125164>}]} [rebalance:info,2014-08-19T16:50:22.427,ns_1@10.242.238.90:<0.22986.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 951 [rebalance:debug,2014-08-19T16:50:22.428,ns_1@10.242.238.90:<0.22986.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6222.1>,#Ref<16550.0.1.125164>}] [ns_server:debug,2014-08-19T16:50:22.428,ns_1@10.242.238.90:<0.22986.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:22.444,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 699. Nacking mccouch update. [views:debug,2014-08-19T16:50:22.444,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/699. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:22.444,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",699,pending,0} [ns_server:debug,2014-08-19T16:50:22.445,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,705,1016,971,756,724,1003,990,958,743,711,1022,977, 762,730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,957,742, 710,1021,976,761,729,1008,995,963,748,716,982,767,735,703,1014,985,969,953, 754,738,722,706,1017,1001,988,972,757,741,725,709,1020,1004,991,975,959,760, 744,728,712,1023,1007,994,978,962,763,747,731,715,699,1010,981,766,734,702, 1013,968,753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714, 980,765,733,701,1012,999,967,752,720,986,739,707,1018,973,758,726,1005,992, 960,745,713,979,764,732,1011,998,966,751,719] [rebalance:debug,2014-08-19T16:50:22.447,ns_1@10.242.238.90:<0.22988.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 951 [ns_server:info,2014-08-19T16:50:22.452,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 695 state to replica [ns_server:info,2014-08-19T16:50:22.458,ns_1@10.242.238.90:<0.22991.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 695 to state replica [ns_server:debug,2014-08-19T16:50:22.500,ns_1@10.242.238.90:<0.22991.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_695_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:22.502,ns_1@10.242.238.90:<0.22991.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[695]}, {checkpoints,[{695,0}]}, {name,<<"replication_building_695_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[695]}, {takeover,false}, {suffix,"building_695_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",695,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:22.503,ns_1@10.242.238.90:<0.22991.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22992.0> [rebalance:debug,2014-08-19T16:50:22.503,ns_1@10.242.238.90:<0.22991.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:50:22.503,ns_1@10.242.238.90:<0.22991.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6242.1>,#Ref<16550.0.1.125283>}]} [rebalance:info,2014-08-19T16:50:22.504,ns_1@10.242.238.90:<0.22991.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 695 [rebalance:debug,2014-08-19T16:50:22.504,ns_1@10.242.238.90:<0.22991.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6242.1>,#Ref<16550.0.1.125283>}] [ns_server:debug,2014-08-19T16:50:22.505,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22993.0> (ok) [ns_server:debug,2014-08-19T16:50:22.505,ns_1@10.242.238.90:<0.22991.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:22.506,ns_1@10.242.238.90:<0.22994.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 695 [views:debug,2014-08-19T16:50:22.528,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/699. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:22.528,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",699,pending,0} [ns_server:info,2014-08-19T16:50:22.574,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 950 state to replica [ns_server:info,2014-08-19T16:50:22.578,ns_1@10.242.238.90:<0.22997.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 950 to state replica [ns_server:debug,2014-08-19T16:50:22.608,ns_1@10.242.238.90:<0.22997.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_950_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:22.609,ns_1@10.242.238.90:<0.22997.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[950]}, {checkpoints,[{950,0}]}, {name,<<"replication_building_950_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[950]}, {takeover,false}, {suffix,"building_950_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",950,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:22.610,ns_1@10.242.238.90:<0.22997.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22998.0> [rebalance:debug,2014-08-19T16:50:22.610,ns_1@10.242.238.90:<0.22997.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:22.610,ns_1@10.242.238.90:<0.22997.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6299.1>,#Ref<16550.0.1.126620>}]} [rebalance:info,2014-08-19T16:50:22.611,ns_1@10.242.238.90:<0.22997.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 950 [rebalance:debug,2014-08-19T16:50:22.611,ns_1@10.242.238.90:<0.22997.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6299.1>,#Ref<16550.0.1.126620>}] [ns_server:debug,2014-08-19T16:50:22.612,ns_1@10.242.238.90:<0.22997.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:22.626,ns_1@10.242.238.90:<0.23013.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 950 
[ns_server:info,2014-08-19T16:50:22.632,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 694 state to replica [ns_server:info,2014-08-19T16:50:22.640,ns_1@10.242.238.90:<0.23016.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 694 to state replica [ns_server:debug,2014-08-19T16:50:22.681,ns_1@10.242.238.90:<0.23016.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_694_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:22.683,ns_1@10.242.238.90:<0.23016.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[694]}, {checkpoints,[{694,0}]}, {name,<<"replication_building_694_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[694]}, {takeover,false}, {suffix,"building_694_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",694,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:22.684,ns_1@10.242.238.90:<0.23016.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23017.0> [rebalance:debug,2014-08-19T16:50:22.684,ns_1@10.242.238.90:<0.23016.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:22.684,ns_1@10.242.238.90:<0.23016.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6319.1>,#Ref<16550.0.1.126739>}]} [rebalance:info,2014-08-19T16:50:22.684,ns_1@10.242.238.90:<0.23016.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 694 [rebalance:debug,2014-08-19T16:50:22.685,ns_1@10.242.238.90:<0.23016.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6319.1>,#Ref<16550.0.1.126739>}] [ns_server:debug,2014-08-19T16:50:22.685,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23018.0> (ok) [ns_server:debug,2014-08-19T16:50:22.685,ns_1@10.242.238.90:<0.23016.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:22.687,ns_1@10.242.238.90:<0.23019.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 694 [ns_server:debug,2014-08-19T16:50:22.703,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 956. Nacking mccouch update. [views:debug,2014-08-19T16:50:22.703,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/956. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:22.703,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",956,replica,0} [ns_server:debug,2014-08-19T16:50:22.704,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,705,1016,971,756,724,1003,990,958,743,711,1022,977, 762,730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,957,742, 710,1021,976,761,729,1008,995,963,748,716,982,767,735,703,1014,985,969,953, 754,738,722,706,1017,1001,988,972,956,757,741,725,709,1020,1004,991,975,959, 760,744,728,712,1023,1007,994,978,962,763,747,731,715,699,1010,981,766,734, 702,1013,968,753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746, 714,980,765,733,701,1012,999,967,752,720,986,739,707,1018,973,758,726,1005, 992,960,745,713,979,764,732,1011,998,966,751,719] [ns_server:info,2014-08-19T16:50:22.767,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 949 state to replica [ns_server:info,2014-08-19T16:50:22.771,ns_1@10.242.238.90:<0.23023.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 949 to state replica [views:debug,2014-08-19T16:50:22.779,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/956. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:22.780,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",956,replica,0} [ns_server:debug,2014-08-19T16:50:22.800,ns_1@10.242.238.90:<0.23023.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_949_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:22.802,ns_1@10.242.238.90:<0.23023.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[949]}, {checkpoints,[{949,0}]}, {name,<<"replication_building_949_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[949]}, {takeover,false}, {suffix,"building_949_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",949,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:22.802,ns_1@10.242.238.90:<0.23023.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23024.0> [rebalance:debug,2014-08-19T16:50:22.803,ns_1@10.242.238.90:<0.23023.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:22.803,ns_1@10.242.238.90:<0.23023.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6376.1>,#Ref<16550.0.1.127003>}]} [rebalance:info,2014-08-19T16:50:22.803,ns_1@10.242.238.90:<0.23023.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 949 [rebalance:debug,2014-08-19T16:50:22.804,ns_1@10.242.238.90:<0.23023.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6376.1>,#Ref<16550.0.1.127003>}] [ns_server:debug,2014-08-19T16:50:22.805,ns_1@10.242.238.90:<0.23023.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:22.821,ns_1@10.242.238.90:<0.23025.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 949 
[ns_server:info,2014-08-19T16:50:22.826,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 693 state to replica [ns_server:info,2014-08-19T16:50:22.833,ns_1@10.242.238.90:<0.23036.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 693 to state replica [ns_server:debug,2014-08-19T16:50:22.874,ns_1@10.242.238.90:<0.23036.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_693_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:22.875,ns_1@10.242.238.90:<0.23036.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[693]}, {checkpoints,[{693,0}]}, {name,<<"replication_building_693_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[693]}, {takeover,false}, {suffix,"building_693_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",693,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:22.876,ns_1@10.242.238.90:<0.23036.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23043.0> [rebalance:debug,2014-08-19T16:50:22.876,ns_1@10.242.238.90:<0.23036.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:22.876,ns_1@10.242.238.90:<0.23036.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6396.1>,#Ref<16550.0.1.127120>}]} [rebalance:info,2014-08-19T16:50:22.877,ns_1@10.242.238.90:<0.23036.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 693 [rebalance:debug,2014-08-19T16:50:22.877,ns_1@10.242.238.90:<0.23036.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6396.1>,#Ref<16550.0.1.127120>}] [ns_server:debug,2014-08-19T16:50:22.878,ns_1@10.242.238.90:<0.23036.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:22.878,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23044.0> (ok) [rebalance:debug,2014-08-19T16:50:22.879,ns_1@10.242.238.90:<0.23045.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 693 [ns_server:debug,2014-08-19T16:50:22.904,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 954. Nacking mccouch update. [views:debug,2014-08-19T16:50:22.904,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/954. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:22.904,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",954,replica,0} [ns_server:debug,2014-08-19T16:50:22.904,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,737,705,1016,971,756,724,1003,990,958,743,711,1022,977, 762,730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,957,742, 710,1021,976,761,729,1008,995,963,748,716,982,767,735,703,1014,985,969,953, 754,738,722,706,1017,1001,988,972,956,757,741,725,709,1020,1004,991,975,959, 760,744,728,712,1023,1007,994,978,962,763,747,731,715,699,1010,981,766,734, 702,1013,968,753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746, 714,980,765,733,701,1012,999,967,752,720,986,954,739,707,1018,973,758,726, 1005,992,960,745,713,979,764,732,1011,998,966,751,719] [ns_server:info,2014-08-19T16:50:22.945,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 948 state to replica [ns_server:info,2014-08-19T16:50:22.949,ns_1@10.242.238.90:<0.23048.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 948 to state replica [views:debug,2014-08-19T16:50:22.971,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/954. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:22.971,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",954,replica,0} [ns_server:debug,2014-08-19T16:50:22.981,ns_1@10.242.238.90:<0.23048.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_948_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:22.983,ns_1@10.242.238.90:<0.23048.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[948]}, {checkpoints,[{948,0}]}, {name,<<"replication_building_948_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[948]}, {takeover,false}, {suffix,"building_948_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",948,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:22.983,ns_1@10.242.238.90:<0.23048.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23049.0> [rebalance:debug,2014-08-19T16:50:22.983,ns_1@10.242.238.90:<0.23048.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:22.984,ns_1@10.242.238.90:<0.23048.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6485.1>,#Ref<16550.0.1.127780>}]} [rebalance:info,2014-08-19T16:50:22.984,ns_1@10.242.238.90:<0.23048.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 948 [rebalance:debug,2014-08-19T16:50:22.985,ns_1@10.242.238.90:<0.23048.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6485.1>,#Ref<16550.0.1.127780>}] [ns_server:debug,2014-08-19T16:50:22.985,ns_1@10.242.238.90:<0.23048.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:23.002,ns_1@10.242.238.90:<0.23050.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 948 
[ns_server:info,2014-08-19T16:50:23.007,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 692 state to replica [ns_server:info,2014-08-19T16:50:23.014,ns_1@10.242.238.90:<0.23053.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 692 to state replica [ns_server:debug,2014-08-19T16:50:23.057,ns_1@10.242.238.90:<0.23053.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_692_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:23.058,ns_1@10.242.238.90:<0.23053.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[692]}, {checkpoints,[{692,0}]}, {name,<<"replication_building_692_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[692]}, {takeover,false}, {suffix,"building_692_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",692,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:23.059,ns_1@10.242.238.90:<0.23053.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23068.0> [rebalance:debug,2014-08-19T16:50:23.059,ns_1@10.242.238.90:<0.23053.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:23.060,ns_1@10.242.238.90:<0.23053.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6505.1>,#Ref<16550.0.1.127896>}]} [rebalance:info,2014-08-19T16:50:23.060,ns_1@10.242.238.90:<0.23053.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 692 [rebalance:debug,2014-08-19T16:50:23.060,ns_1@10.242.238.90:<0.23053.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6505.1>,#Ref<16550.0.1.127896>}] [ns_server:debug,2014-08-19T16:50:23.061,ns_1@10.242.238.90:<0.23053.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:23.061,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23069.0> (ok) [rebalance:debug,2014-08-19T16:50:23.063,ns_1@10.242.238.90:<0.23070.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 692 [ns_server:debug,2014-08-19T16:50:23.113,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 952. Nacking mccouch update. [views:debug,2014-08-19T16:50:23.113,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/952. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:23.113,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",952,replica,0} [ns_server:debug,2014-08-19T16:50:23.114,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,1003,990,958,743,711,1022, 977,762,730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,957, 742,710,1021,976,761,729,1008,995,963,748,716,982,767,735,703,1014,985,969, 953,754,738,722,706,1017,1001,988,972,956,757,741,725,709,1020,1004,991,975, 959,760,744,728,712,1023,1007,994,978,962,763,747,731,715,699,1010,981,766, 734,702,1013,968,753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961, 746,714,980,765,733,701,1012,999,967,752,720,986,954,739,707,1018,973,758, 726,1005,992,960,745,713,979,764,732,1011,998,966,751,719] [ns_server:info,2014-08-19T16:50:23.143,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 947 state to replica [views:debug,2014-08-19T16:50:23.147,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/952. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:23.147,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",952,replica,0} [ns_server:info,2014-08-19T16:50:23.147,ns_1@10.242.238.90:<0.23073.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 947 to state replica [ns_server:debug,2014-08-19T16:50:23.176,ns_1@10.242.238.90:<0.23073.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_947_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:23.178,ns_1@10.242.238.90:<0.23073.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[947]}, {checkpoints,[{947,0}]}, {name,<<"replication_building_947_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[947]}, {takeover,false}, {suffix,"building_947_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",947,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:23.179,ns_1@10.242.238.90:<0.23073.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23074.0> [rebalance:debug,2014-08-19T16:50:23.179,ns_1@10.242.238.90:<0.23073.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:23.180,ns_1@10.242.238.90:<0.23073.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6567.1>,#Ref<16550.0.1.128174>}]} [rebalance:info,2014-08-19T16:50:23.180,ns_1@10.242.238.90:<0.23073.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 947 [rebalance:debug,2014-08-19T16:50:23.180,ns_1@10.242.238.90:<0.23073.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6567.1>,#Ref<16550.0.1.128174>}] [ns_server:debug,2014-08-19T16:50:23.181,ns_1@10.242.238.90:<0.23073.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:23.198,ns_1@10.242.238.90:<0.23075.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 947 
[ns_server:info,2014-08-19T16:50:23.204,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 691 state to replica [ns_server:info,2014-08-19T16:50:23.211,ns_1@10.242.238.90:<0.23092.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 691 to state replica [ns_server:debug,2014-08-19T16:50:23.239,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 700. Nacking mccouch update. [views:debug,2014-08-19T16:50:23.239,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/700. Updated state: pending (1) [ns_server:debug,2014-08-19T16:50:23.239,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",700,pending,1} [ns_server:debug,2014-08-19T16:50:23.239,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,1003,990,958,743,711,1022, 977,762,730,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989,957, 742,710,1021,976,761,729,1008,995,963,748,716,982,767,735,703,1014,969,754, 722,1001,988,972,956,757,741,725,709,1020,1004,991,975,959,760,744,728,712, 1023,1007,994,978,962,763,747,731,715,699,1010,981,766,734,702,1013,968,753, 721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714,980,765,733, 701,1012,999,967,752,720,986,954,739,707,1018,973,758,726,1005,992,960,745, 713,979,764,732,700,1011,998,966,751,719,985,953,738,706,1017] [ns_server:debug,2014-08-19T16:50:23.252,ns_1@10.242.238.90:<0.23092.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_691_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:23.254,ns_1@10.242.238.90:<0.23092.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[691]}, {checkpoints,[{691,0}]}, {name,<<"replication_building_691_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[691]}, {takeover,false}, {suffix,"building_691_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",691,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:23.254,ns_1@10.242.238.90:<0.23092.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23093.0> [rebalance:debug,2014-08-19T16:50:23.255,ns_1@10.242.238.90:<0.23092.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:23.255,ns_1@10.242.238.90:<0.23092.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6587.1>,#Ref<16550.0.1.128291>}]} [rebalance:info,2014-08-19T16:50:23.255,ns_1@10.242.238.90:<0.23092.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 691 [rebalance:debug,2014-08-19T16:50:23.256,ns_1@10.242.238.90:<0.23092.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6587.1>,#Ref<16550.0.1.128291>}] [ns_server:debug,2014-08-19T16:50:23.256,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23094.0> (ok) [ns_server:debug,2014-08-19T16:50:23.256,ns_1@10.242.238.90:<0.23092.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[rebalance:debug,2014-08-19T16:50:23.258,ns_1@10.242.238.90:<0.23095.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 691 [views:debug,2014-08-19T16:50:23.272,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/700. Updated state: pending (1) [ns_server:debug,2014-08-19T16:50:23.273,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",700,pending,1} [ns_server:info,2014-08-19T16:50:23.325,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 946 state to replica [ns_server:info,2014-08-19T16:50:23.329,ns_1@10.242.238.90:<0.23098.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 946 to state replica [ns_server:debug,2014-08-19T16:50:23.359,ns_1@10.242.238.90:<0.23098.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_946_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:23.361,ns_1@10.242.238.90:<0.23098.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[946]}, {checkpoints,[{946,0}]}, {name,<<"replication_building_946_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[946]}, {takeover,false}, {suffix,"building_946_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",946,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:23.362,ns_1@10.242.238.90:<0.23098.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23121.0> [rebalance:debug,2014-08-19T16:50:23.362,ns_1@10.242.238.90:<0.23098.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:23.363,ns_1@10.242.238.90:<0.23098.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6644.1>,#Ref<16550.0.1.128578>}]} [rebalance:info,2014-08-19T16:50:23.363,ns_1@10.242.238.90:<0.23098.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 946 [rebalance:debug,2014-08-19T16:50:23.363,ns_1@10.242.238.90:<0.23098.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6644.1>,#Ref<16550.0.1.128578>}] [ns_server:debug,2014-08-19T16:50:23.364,ns_1@10.242.238.90:<0.23098.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:23.364,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 698. Nacking mccouch update. [views:debug,2014-08-19T16:50:23.364,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/698. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:23.364,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",698,pending,0} [ns_server:debug,2014-08-19T16:50:23.365,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,1003,990,958,743,711,1022, 977,762,730,698,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989, 957,742,710,1021,976,761,729,1008,995,963,748,716,982,767,735,703,1014,969, 754,722,1001,988,972,956,757,741,725,709,1020,1004,991,975,959,760,744,728, 712,1023,1007,994,978,962,763,747,731,715,699,1010,981,766,734,702,1013,968, 753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714,980,765, 733,701,1012,999,967,752,720,986,954,739,707,1018,973,758,726,1005,992,960, 745,713,979,764,732,700,1011,998,966,751,719,985,953,738,706,1017] [rebalance:debug,2014-08-19T16:50:23.380,ns_1@10.242.238.90:<0.23128.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 946 [ns_server:info,2014-08-19T16:50:23.386,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 690 state to replica [ns_server:info,2014-08-19T16:50:23.392,ns_1@10.242.238.90:<0.23131.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 690 to state replica [views:debug,2014-08-19T16:50:23.399,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/698. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:23.400,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",698,pending,0} [ns_server:debug,2014-08-19T16:50:23.441,ns_1@10.242.238.90:<0.23131.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_690_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:23.442,ns_1@10.242.238.90:<0.23131.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[690]}, {checkpoints,[{690,0}]}, {name,<<"replication_building_690_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[690]}, {takeover,false}, {suffix,"building_690_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",690,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:23.443,ns_1@10.242.238.90:<0.23131.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23134.0> [rebalance:debug,2014-08-19T16:50:23.443,ns_1@10.242.238.90:<0.23131.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:23.444,ns_1@10.242.238.90:<0.23131.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6665.1>,#Ref<16550.0.1.128714>}]} [rebalance:info,2014-08-19T16:50:23.444,ns_1@10.242.238.90:<0.23131.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 690 [rebalance:debug,2014-08-19T16:50:23.444,ns_1@10.242.238.90:<0.23131.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6665.1>,#Ref<16550.0.1.128714>}] [ns_server:debug,2014-08-19T16:50:23.445,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23135.0> (ok) 
[ns_server:debug,2014-08-19T16:50:23.450,ns_1@10.242.238.90:<0.23131.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:23.451,ns_1@10.242.238.90:<0.23136.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 690 [ns_server:debug,2014-08-19T16:50:23.483,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 696. Nacking mccouch update. [views:debug,2014-08-19T16:50:23.483,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/696. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:23.483,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",696,pending,0} [ns_server:debug,2014-08-19T16:50:23.484,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,1003,990,958,743,711,1022, 977,762,730,698,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989, 957,742,710,1021,976,761,729,1008,995,963,748,716,982,767,735,703,1014,969, 754,722,1001,988,972,956,757,741,725,709,1020,1004,991,975,959,760,744,728, 712,696,1023,1007,994,978,962,763,747,731,715,699,1010,981,766,734,702,1013, 968,753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714,980, 765,733,701,1012,999,967,752,720,986,954,739,707,1018,973,758,726,1005,992, 960,745,713,979,764,732,700,1011,998,966,751,719,985,953,738,706,1017] [ns_server:info,2014-08-19T16:50:23.517,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 945 state to replica [views:debug,2014-08-19T16:50:23.517,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/696. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:23.517,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",696,pending,0} [rebalance:debug,2014-08-19T16:50:23.518,ns_1@10.242.238.90:<0.22812.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:23.518,ns_1@10.242.238.90:<0.22768.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:23.518,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22812.0> (ok) [ns_server:debug,2014-08-19T16:50:23.518,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22768.0> (ok) [ns_server:info,2014-08-19T16:50:23.521,ns_1@10.242.238.90:<0.23153.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 945 to state replica [ns_server:debug,2014-08-19T16:50:23.551,ns_1@10.242.238.90:<0.23153.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_945_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:23.552,ns_1@10.242.238.90:<0.23153.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[945]}, {checkpoints,[{945,0}]}, {name,<<"replication_building_945_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[945]}, {takeover,false}, {suffix,"building_945_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",945,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:23.553,ns_1@10.242.238.90:<0.23153.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23154.0> [rebalance:debug,2014-08-19T16:50:23.553,ns_1@10.242.238.90:<0.23153.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:23.553,ns_1@10.242.238.90:<0.23153.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6722.1>,#Ref<16550.0.1.129016>}]} [rebalance:info,2014-08-19T16:50:23.554,ns_1@10.242.238.90:<0.23153.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 945 [rebalance:debug,2014-08-19T16:50:23.554,ns_1@10.242.238.90:<0.23153.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6722.1>,#Ref<16550.0.1.129016>}] [ns_server:debug,2014-08-19T16:50:23.555,ns_1@10.242.238.90:<0.23153.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:23.571,ns_1@10.242.238.90:<0.23155.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 945 [ns_server:info,2014-08-19T16:50:23.576,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 689 state to replica [ns_server:info,2014-08-19T16:50:23.583,ns_1@10.242.238.90:<0.23166.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 689 to state replica [ns_server:debug,2014-08-19T16:50:23.625,ns_1@10.242.238.90:<0.23166.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_689_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:23.626,ns_1@10.242.238.90:<0.23166.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[689]}, {checkpoints,[{689,0}]}, {name,<<"replication_building_689_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, 
[{vbuckets,[689]}, {takeover,false}, {suffix,"building_689_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",689,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:23.626,ns_1@10.242.238.90:<0.23166.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23173.0> [rebalance:debug,2014-08-19T16:50:23.627,ns_1@10.242.238.90:<0.23166.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:23.627,ns_1@10.242.238.90:<0.23166.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6756.1>,#Ref<16550.0.1.129165>}]} [rebalance:info,2014-08-19T16:50:23.627,ns_1@10.242.238.90:<0.23166.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 689 [rebalance:debug,2014-08-19T16:50:23.628,ns_1@10.242.238.90:<0.23166.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6756.1>,#Ref<16550.0.1.129165>}] [ns_server:debug,2014-08-19T16:50:23.628,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23174.0> (ok) [ns_server:debug,2014-08-19T16:50:23.629,ns_1@10.242.238.90:<0.23166.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:23.630,ns_1@10.242.238.90:<0.23175.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 689 [ns_server:debug,2014-08-19T16:50:23.663,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 950. Nacking mccouch update. [views:debug,2014-08-19T16:50:23.663,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/950. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:23.663,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",950,replica,0} [ns_server:debug,2014-08-19T16:50:23.663,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,1003,990,958,743,711,1022, 977,762,730,698,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989, 957,742,710,1021,976,761,729,1008,995,963,748,716,982,950,767,735,703,1014, 969,754,722,1001,988,972,956,757,741,725,709,1020,1004,991,975,959,760,744, 728,712,696,1023,1007,994,978,962,763,747,731,715,699,1010,981,766,734,702, 1013,968,753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714, 980,765,733,701,1012,999,967,752,720,986,954,739,707,1018,973,758,726,1005, 992,960,745,713,979,764,732,700,1011,998,966,751,719,985,953,738,706,1017] [ns_server:info,2014-08-19T16:50:23.695,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 944 state to replica [ns_server:info,2014-08-19T16:50:23.699,ns_1@10.242.238.90:<0.23178.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 944 to state replica [ns_server:debug,2014-08-19T16:50:23.730,ns_1@10.242.238.90:<0.23178.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_944_'ns_1@10.242.238.90' [views:debug,2014-08-19T16:50:23.730,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/950. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:23.730,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",950,replica,0} [rebalance:info,2014-08-19T16:50:23.733,ns_1@10.242.238.90:<0.23178.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[944]}, {checkpoints,[{944,0}]}, {name,<<"replication_building_944_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[944]}, {takeover,false}, {suffix,"building_944_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",944,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:23.734,ns_1@10.242.238.90:<0.23178.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23179.0> [rebalance:debug,2014-08-19T16:50:23.734,ns_1@10.242.238.90:<0.23178.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:23.735,ns_1@10.242.238.90:<0.23178.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6799.1>,#Ref<16550.0.1.129401>}]} [rebalance:info,2014-08-19T16:50:23.735,ns_1@10.242.238.90:<0.23178.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 944 [rebalance:debug,2014-08-19T16:50:23.735,ns_1@10.242.238.90:<0.23178.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6799.1>,#Ref<16550.0.1.129401>}] [ns_server:debug,2014-08-19T16:50:23.736,ns_1@10.242.238.90:<0.23178.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:23.750,ns_1@10.242.238.90:<0.23180.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 944 [ns_server:info,2014-08-19T16:50:23.755,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 688 state to replica [ns_server:info,2014-08-19T16:50:23.761,ns_1@10.242.238.90:<0.23183.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 688 to state replica [ns_server:debug,2014-08-19T16:50:23.805,ns_1@10.242.238.90:<0.23183.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_688_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:23.807,ns_1@10.242.238.90:<0.23183.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[688]}, {checkpoints,[{688,0}]}, {name,<<"replication_building_688_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[688]}, {takeover,false}, {suffix,"building_688_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",688,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:23.808,ns_1@10.242.238.90:<0.23183.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23198.0> [rebalance:debug,2014-08-19T16:50:23.808,ns_1@10.242.238.90:<0.23183.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:23.808,ns_1@10.242.238.90:<0.23183.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6833.1>,#Ref<16550.0.1.129546>}]} [rebalance:info,2014-08-19T16:50:23.808,ns_1@10.242.238.90:<0.23183.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 688 
[rebalance:debug,2014-08-19T16:50:23.809,ns_1@10.242.238.90:<0.23183.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6833.1>,#Ref<16550.0.1.129546>}] [ns_server:debug,2014-08-19T16:50:23.809,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23199.0> (ok) [ns_server:debug,2014-08-19T16:50:23.810,ns_1@10.242.238.90:<0.23183.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:23.811,ns_1@10.242.238.90:<0.23200.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 688 [ns_server:debug,2014-08-19T16:50:23.855,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 948. Nacking mccouch update. [views:debug,2014-08-19T16:50:23.855,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/948. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:23.855,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",948,replica,0} [ns_server:debug,2014-08-19T16:50:23.856,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,1003,990,958,743,711,1022, 977,762,730,698,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989, 957,742,710,1021,976,761,729,1008,995,963,748,716,982,950,767,735,703,1014, 969,754,722,1001,988,972,956,757,741,725,709,1020,1004,991,975,959,760,744, 728,712,696,1023,1007,994,978,962,763,747,731,715,699,1010,981,766,734,702, 1013,968,753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714, 980,948,765,733,701,1012,999,967,752,720,986,954,739,707,1018,973,758,726, 1005,992,960,745,713,979,764,732,700,1011,998,966,751,719,985,953,738,706, 1017] [ns_server:info,2014-08-19T16:50:23.879,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 943 state to replica [ns_server:info,2014-08-19T16:50:23.883,ns_1@10.242.238.90:<0.23203.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 943 to state replica [ns_server:debug,2014-08-19T16:50:23.914,ns_1@10.242.238.90:<0.23203.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_943_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:23.916,ns_1@10.242.238.90:<0.23203.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[943]}, {checkpoints,[{943,0}]}, {name,<<"replication_building_943_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[943]}, {takeover,false}, {suffix,"building_943_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",943,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:23.917,ns_1@10.242.238.90:<0.23203.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23204.0> [rebalance:debug,2014-08-19T16:50:23.917,ns_1@10.242.238.90:<0.23203.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:23.918,ns_1@10.242.238.90:<0.23203.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6890.1>,#Ref<16550.0.1.129830>}]} 
[rebalance:info,2014-08-19T16:50:23.918,ns_1@10.242.238.90:<0.23203.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 943 [rebalance:debug,2014-08-19T16:50:23.918,ns_1@10.242.238.90:<0.23203.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6890.1>,#Ref<16550.0.1.129830>}] [ns_server:debug,2014-08-19T16:50:23.919,ns_1@10.242.238.90:<0.23203.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:50:23.922,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/948. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:23.923,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",948,replica,0} [rebalance:debug,2014-08-19T16:50:23.933,ns_1@10.242.238.90:<0.23205.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 943 [ns_server:info,2014-08-19T16:50:23.939,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 687 state to replica [ns_server:info,2014-08-19T16:50:23.945,ns_1@10.242.238.90:<0.23208.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 687 to state replica [ns_server:debug,2014-08-19T16:50:23.988,ns_1@10.242.238.90:<0.23208.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_687_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:23.990,ns_1@10.242.238.90:<0.23208.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[687]}, {checkpoints,[{687,0}]}, {name,<<"replication_building_687_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[687]}, {takeover,false}, {suffix,"building_687_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",687,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:23.990,ns_1@10.242.238.90:<0.23208.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23223.0> [rebalance:debug,2014-08-19T16:50:23.990,ns_1@10.242.238.90:<0.23208.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:23.991,ns_1@10.242.238.90:<0.23208.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6910.1>,#Ref<16550.0.1.129947>}]} [rebalance:info,2014-08-19T16:50:23.991,ns_1@10.242.238.90:<0.23208.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 687 [rebalance:debug,2014-08-19T16:50:23.991,ns_1@10.242.238.90:<0.23208.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6910.1>,#Ref<16550.0.1.129947>}] [ns_server:debug,2014-08-19T16:50:23.992,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23224.0> (ok) [ns_server:debug,2014-08-19T16:50:23.992,ns_1@10.242.238.90:<0.23208.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:23.994,ns_1@10.242.238.90:<0.23225.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 687 [ns_server:debug,2014-08-19T16:50:24.056,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 946. Nacking mccouch update. 
[views:debug,2014-08-19T16:50:24.056,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/946. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:24.057,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",946,replica,0} [ns_server:debug,2014-08-19T16:50:24.057,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,1003,990,958,743,711,1022, 977,762,730,698,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989, 957,742,710,1021,976,761,729,1008,995,963,748,716,982,950,767,735,703,1014, 969,754,722,1001,988,956,741,709,1020,991,975,959,760,744,728,712,696,1023, 1007,994,978,962,946,763,747,731,715,699,1010,981,766,734,702,1013,968,753, 721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714,980,948,765, 733,701,1012,999,967,752,720,986,954,739,707,1018,973,758,726,1005,992,960, 745,713,979,764,732,700,1011,998,966,751,719,985,953,738,706,1017,972,757, 725,1004] [ns_server:info,2014-08-19T16:50:24.063,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 942 state to replica [ns_server:info,2014-08-19T16:50:24.067,ns_1@10.242.238.90:<0.23228.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 942 to state replica [ns_server:debug,2014-08-19T16:50:24.098,ns_1@10.242.238.90:<0.23228.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_942_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:24.100,ns_1@10.242.238.90:<0.23228.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[942]}, {checkpoints,[{942,0}]}, {name,<<"replication_building_942_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[942]}, {takeover,false}, {suffix,"building_942_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",942,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:24.100,ns_1@10.242.238.90:<0.23228.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23229.0> [rebalance:debug,2014-08-19T16:50:24.101,ns_1@10.242.238.90:<0.23228.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:24.101,ns_1@10.242.238.90:<0.23228.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6967.1>,#Ref<16550.0.1.130212>}]} [rebalance:info,2014-08-19T16:50:24.102,ns_1@10.242.238.90:<0.23228.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 942 [rebalance:debug,2014-08-19T16:50:24.102,ns_1@10.242.238.90:<0.23228.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6967.1>,#Ref<16550.0.1.130212>}] [ns_server:debug,2014-08-19T16:50:24.103,ns_1@10.242.238.90:<0.23228.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:24.118,ns_1@10.242.238.90:<0.23230.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 942 [ns_server:info,2014-08-19T16:50:24.123,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 686 state to replica 
[views:debug,2014-08-19T16:50:24.123,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/946. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:24.124,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",946,replica,0} [ns_server:info,2014-08-19T16:50:24.130,ns_1@10.242.238.90:<0.23233.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 686 to state replica [ns_server:debug,2014-08-19T16:50:24.175,ns_1@10.242.238.90:<0.23233.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_686_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:24.177,ns_1@10.242.238.90:<0.23233.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[686]}, {checkpoints,[{686,0}]}, {name,<<"replication_building_686_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[686]}, {takeover,false}, {suffix,"building_686_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",686,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:24.178,ns_1@10.242.238.90:<0.23233.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23234.0> [rebalance:debug,2014-08-19T16:50:24.178,ns_1@10.242.238.90:<0.23233.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:24.178,ns_1@10.242.238.90:<0.23233.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6987.1>,#Ref<16550.0.1.130341>}]} [rebalance:info,2014-08-19T16:50:24.179,ns_1@10.242.238.90:<0.23233.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 686 [rebalance:debug,2014-08-19T16:50:24.179,ns_1@10.242.238.90:<0.23233.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6987.1>,#Ref<16550.0.1.130341>}] [ns_server:debug,2014-08-19T16:50:24.180,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23235.0> (ok) [ns_server:debug,2014-08-19T16:50:24.180,ns_1@10.242.238.90:<0.23233.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:24.181,ns_1@10.242.238.90:<0.23236.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 686 [ns_server:info,2014-08-19T16:50:24.246,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 941 state to replica [ns_server:info,2014-08-19T16:50:24.250,ns_1@10.242.238.90:<0.23253.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 941 to state replica [ns_server:debug,2014-08-19T16:50:24.265,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 694. Nacking mccouch update. [views:debug,2014-08-19T16:50:24.266,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/694. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:24.266,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",694,pending,0} [ns_server:debug,2014-08-19T16:50:24.266,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,1003,990,958,743,711,1022, 977,762,730,698,1009,996,964,749,717,983,736,704,1015,970,755,723,1002,989, 957,742,710,1021,976,761,729,1008,995,963,748,716,982,950,767,735,703,1014, 969,754,722,1001,988,956,741,709,1020,991,975,959,760,744,728,712,696,1023, 1007,994,978,962,946,763,747,731,715,699,1010,981,766,734,702,1013,968,753, 721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714,980,948,765, 733,701,1012,999,967,752,720,986,954,739,707,1018,973,758,726,694,1005,992, 960,745,713,979,764,732,700,1011,998,966,751,719,985,953,738,706,1017,972, 757,725,1004] [ns_server:debug,2014-08-19T16:50:24.281,ns_1@10.242.238.90:<0.23253.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_941_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:24.282,ns_1@10.242.238.90:<0.23253.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[941]}, {checkpoints,[{941,0}]}, {name,<<"replication_building_941_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[941]}, {takeover,false}, {suffix,"building_941_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",941,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:24.282,ns_1@10.242.238.90:<0.23253.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23254.0> [rebalance:debug,2014-08-19T16:50:24.282,ns_1@10.242.238.90:<0.23253.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:24.283,ns_1@10.242.238.90:<0.23253.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7034.1>,#Ref<16550.0.1.130590>}]} [rebalance:info,2014-08-19T16:50:24.283,ns_1@10.242.238.90:<0.23253.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 941 [rebalance:debug,2014-08-19T16:50:24.283,ns_1@10.242.238.90:<0.23253.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7034.1>,#Ref<16550.0.1.130590>}] [ns_server:debug,2014-08-19T16:50:24.285,ns_1@10.242.238.90:<0.23253.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:50:24.299,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/694. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:24.300,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",694,pending,0} [rebalance:debug,2014-08-19T16:50:24.301,ns_1@10.242.238.90:<0.23255.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 941 [ns_server:info,2014-08-19T16:50:24.306,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 685 state to replica [ns_server:info,2014-08-19T16:50:24.313,ns_1@10.242.238.90:<0.23258.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 685 to state replica [ns_server:debug,2014-08-19T16:50:24.356,ns_1@10.242.238.90:<0.23258.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_685_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:24.358,ns_1@10.242.238.90:<0.23258.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[685]}, {checkpoints,[{685,0}]}, {name,<<"replication_building_685_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[685]}, {takeover,false}, {suffix,"building_685_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",685,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:24.358,ns_1@10.242.238.90:<0.23258.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23259.0> [rebalance:debug,2014-08-19T16:50:24.358,ns_1@10.242.238.90:<0.23258.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:24.359,ns_1@10.242.238.90:<0.23258.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7068.1>,#Ref<16550.0.1.130751>}]} [rebalance:info,2014-08-19T16:50:24.359,ns_1@10.242.238.90:<0.23258.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 685 [rebalance:debug,2014-08-19T16:50:24.359,ns_1@10.242.238.90:<0.23258.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7068.1>,#Ref<16550.0.1.130751>}] [ns_server:debug,2014-08-19T16:50:24.360,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23265.0> (ok) [ns_server:debug,2014-08-19T16:50:24.360,ns_1@10.242.238.90:<0.23258.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:24.361,ns_1@10.242.238.90:<0.23269.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 685 [ns_server:debug,2014-08-19T16:50:24.426,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 692. Nacking mccouch update. [views:debug,2014-08-19T16:50:24.426,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/692. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:24.426,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",692,pending,0} [ns_server:debug,2014-08-19T16:50:24.426,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,692,1003,990,958,743,711, 1022,977,762,730,698,1009,996,964,749,717,983,736,704,1015,970,755,723,1002, 989,957,742,710,1021,976,761,729,1008,995,963,748,716,982,950,767,735,703, 1014,969,754,722,1001,988,956,741,709,1020,991,975,959,760,744,728,712,696, 1023,1007,994,978,962,946,763,747,731,715,699,1010,981,766,734,702,1013,968, 753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714,980,948, 765,733,701,1012,999,967,752,720,986,954,739,707,1018,973,758,726,694,1005, 992,960,745,713,979,764,732,700,1011,998,966,751,719,985,953,738,706,1017, 972,757,725,1004] [ns_server:info,2014-08-19T16:50:24.429,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 940 state to replica [ns_server:info,2014-08-19T16:50:24.437,ns_1@10.242.238.90:<0.23279.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 940 to state replica [ns_server:debug,2014-08-19T16:50:24.473,ns_1@10.242.238.90:<0.23279.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_940_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:24.474,ns_1@10.242.238.90:<0.23279.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[940]}, {checkpoints,[{940,0}]}, {name,<<"replication_building_940_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[940]}, {takeover,false}, {suffix,"building_940_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",940,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:24.475,ns_1@10.242.238.90:<0.23279.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23280.0> [rebalance:debug,2014-08-19T16:50:24.475,ns_1@10.242.238.90:<0.23279.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:24.476,ns_1@10.242.238.90:<0.23279.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7111.1>,#Ref<16550.0.1.130986>}]} [rebalance:info,2014-08-19T16:50:24.476,ns_1@10.242.238.90:<0.23279.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 940 [rebalance:debug,2014-08-19T16:50:24.476,ns_1@10.242.238.90:<0.23279.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7111.1>,#Ref<16550.0.1.130986>}] [ns_server:debug,2014-08-19T16:50:24.477,ns_1@10.242.238.90:<0.23279.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:24.486,ns_1@10.242.238.90:<0.23281.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 940 [ns_server:info,2014-08-19T16:50:24.492,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 684 state to replica [views:debug,2014-08-19T16:50:24.493,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/692. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:24.493,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",692,pending,0} [ns_server:info,2014-08-19T16:50:24.498,ns_1@10.242.238.90:<0.23284.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 684 to state replica [ns_server:debug,2014-08-19T16:50:24.541,ns_1@10.242.238.90:<0.23284.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_684_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:24.542,ns_1@10.242.238.90:<0.23284.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[684]}, {checkpoints,[{684,0}]}, {name,<<"replication_building_684_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[684]}, {takeover,false}, {suffix,"building_684_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",684,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:24.543,ns_1@10.242.238.90:<0.23284.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23285.0> [rebalance:debug,2014-08-19T16:50:24.543,ns_1@10.242.238.90:<0.23284.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:24.544,ns_1@10.242.238.90:<0.23284.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7145.1>,#Ref<16550.0.1.131130>}]} [rebalance:info,2014-08-19T16:50:24.544,ns_1@10.242.238.90:<0.23284.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 684 [rebalance:debug,2014-08-19T16:50:24.544,ns_1@10.242.238.90:<0.23284.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7145.1>,#Ref<16550.0.1.131130>}] [ns_server:debug,2014-08-19T16:50:24.545,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23286.0> (ok) [ns_server:debug,2014-08-19T16:50:24.545,ns_1@10.242.238.90:<0.23284.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:24.546,ns_1@10.242.238.90:<0.23287.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 684 [ns_server:info,2014-08-19T16:50:24.614,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 939 state to replica [ns_server:info,2014-08-19T16:50:24.618,ns_1@10.242.238.90:<0.23304.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 939 to state replica [ns_server:debug,2014-08-19T16:50:24.627,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 690. Nacking mccouch update. [views:debug,2014-08-19T16:50:24.627,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/690. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:24.627,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",690,pending,0} [ns_server:debug,2014-08-19T16:50:24.627,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,692,1003,990,958,743,711, 1022,977,762,730,698,1009,996,964,749,717,983,736,704,1015,970,755,723,1002, 989,957,742,710,1021,976,761,729,1008,995,963,748,716,982,950,767,735,703, 1014,969,754,722,690,1001,988,956,741,709,1020,991,975,959,760,744,728,712, 696,1023,1007,994,978,962,946,763,747,731,715,699,1010,981,766,734,702,1013, 968,753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714,980, 948,765,733,701,1012,999,967,752,720,986,954,739,707,1018,973,758,726,694, 1005,992,960,745,713,979,764,732,700,1011,998,966,751,719,985,953,738,706, 1017,972,757,725,1004] [ns_server:debug,2014-08-19T16:50:24.649,ns_1@10.242.238.90:<0.23304.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_939_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:24.651,ns_1@10.242.238.90:<0.23304.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[939]}, {checkpoints,[{939,0}]}, {name,<<"replication_building_939_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[939]}, {takeover,false}, {suffix,"building_939_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",939,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:24.651,ns_1@10.242.238.90:<0.23304.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23305.0> [rebalance:debug,2014-08-19T16:50:24.651,ns_1@10.242.238.90:<0.23304.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:24.652,ns_1@10.242.238.90:<0.23304.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7188.1>,#Ref<16550.0.1.131345>}]} [rebalance:info,2014-08-19T16:50:24.652,ns_1@10.242.238.90:<0.23304.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 939 [rebalance:debug,2014-08-19T16:50:24.652,ns_1@10.242.238.90:<0.23304.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7188.1>,#Ref<16550.0.1.131345>}] [ns_server:debug,2014-08-19T16:50:24.653,ns_1@10.242.238.90:<0.23304.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:24.668,ns_1@10.242.238.90:<0.23306.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 939 [ns_server:info,2014-08-19T16:50:24.673,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 683 state to replica [ns_server:info,2014-08-19T16:50:24.679,ns_1@10.242.238.90:<0.23309.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 683 to state replica [views:debug,2014-08-19T16:50:24.685,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/690. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:24.686,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",690,pending,0} [ns_server:debug,2014-08-19T16:50:24.720,ns_1@10.242.238.90:<0.23309.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_683_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:24.722,ns_1@10.242.238.90:<0.23309.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[683]}, {checkpoints,[{683,0}]}, {name,<<"replication_building_683_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[683]}, {takeover,false}, {suffix,"building_683_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",683,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:24.722,ns_1@10.242.238.90:<0.23309.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23310.0> [rebalance:debug,2014-08-19T16:50:24.722,ns_1@10.242.238.90:<0.23309.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:24.723,ns_1@10.242.238.90:<0.23309.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7208.1>,#Ref<16550.0.1.131463>}]} [rebalance:info,2014-08-19T16:50:24.723,ns_1@10.242.238.90:<0.23309.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 683 [rebalance:debug,2014-08-19T16:50:24.723,ns_1@10.242.238.90:<0.23309.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7208.1>,#Ref<16550.0.1.131463>}] [ns_server:debug,2014-08-19T16:50:24.724,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23311.0> (ok) [ns_server:debug,2014-08-19T16:50:24.725,ns_1@10.242.238.90:<0.23309.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:24.726,ns_1@10.242.238.90:<0.23312.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 683 [ns_server:info,2014-08-19T16:50:24.792,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 938 state to replica [ns_server:info,2014-08-19T16:50:24.796,ns_1@10.242.238.90:<0.23329.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 938 to state replica [ns_server:debug,2014-08-19T16:50:24.826,ns_1@10.242.238.90:<0.23329.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_938_'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:50:24.827,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 951. Nacking mccouch update. [views:debug,2014-08-19T16:50:24.827,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/951. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:24.828,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",951,replica,0} [ns_server:debug,2014-08-19T16:50:24.828,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,692,1003,990,958,743,711, 1022,977,762,730,698,1009,996,964,749,717,983,951,736,704,1015,970,755,723, 1002,989,957,742,710,1021,976,761,729,1008,995,963,748,716,982,950,767,735, 703,1014,969,754,722,690,1001,988,956,741,709,1020,991,975,959,760,744,728, 712,696,1023,1007,994,978,962,946,763,747,731,715,699,1010,981,766,734,702, 1013,968,753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714, 980,948,765,733,701,1012,999,967,752,720,986,954,739,707,1018,973,758,726, 694,1005,992,960,745,713,979,764,732,700,1011,998,966,751,719,985,953,738, 706,1017,972,757,725,1004] [rebalance:info,2014-08-19T16:50:24.828,ns_1@10.242.238.90:<0.23329.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[938]}, {checkpoints,[{938,0}]}, {name,<<"replication_building_938_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[938]}, {takeover,false}, {suffix,"building_938_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",938,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:24.829,ns_1@10.242.238.90:<0.23329.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23330.0> [rebalance:debug,2014-08-19T16:50:24.829,ns_1@10.242.238.90:<0.23329.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:24.829,ns_1@10.242.238.90:<0.23329.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7265.1>,#Ref<16550.0.1.131728>}]} [rebalance:info,2014-08-19T16:50:24.829,ns_1@10.242.238.90:<0.23329.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 938 [rebalance:debug,2014-08-19T16:50:24.830,ns_1@10.242.238.90:<0.23329.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7265.1>,#Ref<16550.0.1.131728>}] [ns_server:debug,2014-08-19T16:50:24.831,ns_1@10.242.238.90:<0.23329.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:24.847,ns_1@10.242.238.90:<0.23331.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 938 [views:debug,2014-08-19T16:50:24.878,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/951. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:24.878,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",951,replica,0} [ns_server:debug,2014-08-19T16:50:25.020,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 949. Nacking mccouch update. [views:debug,2014-08-19T16:50:25.020,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/949. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:25.020,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",949,replica,0} [ns_server:debug,2014-08-19T16:50:25.020,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,692,1003,990,958,743,711, 1022,977,762,730,698,1009,996,964,749,717,983,951,736,704,1015,970,755,723, 1002,989,957,742,710,1021,976,761,729,1008,995,963,748,716,982,950,767,735, 703,1014,969,754,722,690,1001,988,956,741,709,1020,975,760,728,696,1007,994, 978,962,946,763,747,731,715,699,1010,981,949,766,734,702,1013,968,753,721, 1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714,980,948,765,733, 701,1012,999,967,752,720,986,954,739,707,1018,973,758,726,694,1005,992,960, 745,713,979,764,732,700,1011,998,966,751,719,985,953,738,706,1017,972,757, 725,1004,991,959,744,712,1023] [views:debug,2014-08-19T16:50:25.087,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/949. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:25.087,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",949,replica,0} [ns_server:debug,2014-08-19T16:50:25.184,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 947. Nacking mccouch update. [views:debug,2014-08-19T16:50:25.184,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/947. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:25.184,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",947,replica,0} [ns_server:debug,2014-08-19T16:50:25.184,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,692,1003,990,958,743,711, 1022,977,762,730,698,1009,996,964,749,717,983,951,736,704,1015,970,755,723, 1002,989,957,742,710,1021,976,761,729,1008,995,963,748,716,982,950,767,735, 703,1014,969,754,722,690,1001,988,956,741,709,1020,975,760,728,696,1007,994, 978,962,946,763,747,731,715,699,1010,981,949,766,734,702,1013,968,753,721, 1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714,980,948,765,733, 701,1012,999,967,752,720,986,954,739,707,1018,973,758,726,694,1005,992,960, 745,713,979,947,764,732,700,1011,998,966,751,719,985,953,738,706,1017,972, 757,725,1004,991,959,744,712,1023] [views:debug,2014-08-19T16:50:25.218,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/947. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:25.218,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",947,replica,0} [ns_server:debug,2014-08-19T16:50:25.285,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 945. Nacking mccouch update. [views:debug,2014-08-19T16:50:25.285,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/945. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:25.285,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",945,replica,0} [ns_server:debug,2014-08-19T16:50:25.285,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,692,1003,990,958,743,711, 1022,977,945,762,730,698,1009,996,964,749,717,983,951,736,704,1015,970,755, 723,1002,989,957,742,710,1021,976,761,729,1008,995,963,748,716,982,950,767, 735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,760,728,696,1007, 994,978,962,946,763,747,731,715,699,1010,981,949,766,734,702,1013,968,753, 721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714,980,948,765, 733,701,1012,999,967,752,720,986,954,739,707,1018,973,758,726,694,1005,992, 960,745,713,979,947,764,732,700,1011,998,966,751,719,985,953,738,706,1017, 972,757,725,1004,991,959,744,712,1023] [views:debug,2014-08-19T16:50:25.319,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/945. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:25.319,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",945,replica,0} [ns_server:debug,2014-08-19T16:50:25.386,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 943. Nacking mccouch update. [views:debug,2014-08-19T16:50:25.386,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/943. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:25.386,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",943,replica,0} [ns_server:debug,2014-08-19T16:50:25.386,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,692,1003,990,958,743,711, 1022,977,945,762,730,698,1009,996,964,749,717,983,951,736,704,1015,970,755, 723,1002,989,957,742,710,1021,976,761,729,1008,995,963,748,716,982,950,767, 735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728,696, 1007,994,978,962,946,763,747,731,715,699,1010,981,949,766,734,702,1013,968, 753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714,980,948, 765,733,701,1012,999,967,752,720,986,954,739,707,1018,973,758,726,694,1005, 992,960,745,713,979,947,764,732,700,1011,998,966,751,719,985,953,738,706, 1017,972,757,725,1004,991,959,744,712,1023] [views:debug,2014-08-19T16:50:25.420,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/943. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:25.420,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",943,replica,0} [ns_server:debug,2014-08-19T16:50:25.486,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 941. Nacking mccouch update. [views:debug,2014-08-19T16:50:25.487,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/941. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:25.487,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",941,replica,0} [ns_server:debug,2014-08-19T16:50:25.487,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,756,724,692,1003,990,958,743,711, 1022,977,945,762,730,698,1009,996,964,749,717,983,951,736,704,1015,970,755, 723,1002,989,957,742,710,1021,976,761,729,1008,995,963,748,716,982,950,767, 735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728,696, 1007,994,978,962,946,763,747,731,715,699,1010,981,949,766,734,702,1013,968, 753,721,1000,987,955,740,708,1019,974,759,727,1006,993,961,746,714,980,948, 765,733,701,1012,999,967,752,720,986,954,739,707,1018,973,941,758,726,694, 1005,992,960,745,713,979,947,764,732,700,1011,998,966,751,719,985,953,738, 706,1017,972,757,725,1004,991,959,744,712,1023] [views:debug,2014-08-19T16:50:25.521,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/941. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:25.521,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",941,replica,0} [ns_server:debug,2014-08-19T16:50:25.595,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 939. Nacking mccouch update. [views:debug,2014-08-19T16:50:25.595,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/939. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:25.596,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",939,replica,0} [ns_server:debug,2014-08-19T16:50:25.596,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,939,756,724,692,1003,990,958,743, 711,1022,977,945,762,730,698,1009,996,964,749,717,983,951,736,704,1015,970, 755,723,1002,989,957,742,710,1021,976,761,729,1008,995,963,748,716,982,950, 767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728, 696,1007,994,962,747,715,981,949,766,734,702,1013,968,753,721,1000,987,955, 740,708,1019,974,759,727,1006,993,961,746,714,980,948,765,733,701,1012,999, 967,752,720,986,954,739,707,1018,973,941,758,726,694,1005,992,960,745,713, 979,947,764,732,700,1011,998,966,751,719,985,953,738,706,1017,972,757,725, 1004,991,959,744,712,1023,978,946,763,731,699,1010] [views:debug,2014-08-19T16:50:25.663,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/939. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:25.663,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",939,replica,0} [ns_server:debug,2014-08-19T16:50:25.855,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 697. Nacking mccouch update. [views:debug,2014-08-19T16:50:25.855,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/697. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:25.855,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",697,pending,0} [ns_server:debug,2014-08-19T16:50:25.856,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,939,756,724,692,1003,990,958,743, 711,1022,977,945,762,730,698,1009,996,964,749,717,983,951,736,704,1015,970, 755,723,1002,989,957,742,710,1021,976,761,729,697,1008,995,963,748,716,982, 950,767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760, 728,696,1007,994,962,747,715,981,949,766,734,702,1013,968,753,721,1000,987, 955,740,708,1019,974,759,727,1006,993,961,746,714,980,948,765,733,701,1012, 999,967,752,720,986,954,739,707,1018,973,941,758,726,694,1005,992,960,745, 713,979,947,764,732,700,1011,998,966,751,719,985,953,738,706,1017,972,757, 725,1004,991,959,744,712,1023,978,946,763,731,699,1010] [views:debug,2014-08-19T16:50:25.923,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/697. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:25.923,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",697,pending,0} [ns_server:debug,2014-08-19T16:50:26.072,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 695. Nacking mccouch update. [views:debug,2014-08-19T16:50:26.073,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/695. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:26.073,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",695,pending,0} [ns_server:debug,2014-08-19T16:50:26.073,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,939,756,724,692,1003,990,958,743, 711,1022,977,945,762,730,698,1009,996,964,749,717,983,951,736,704,1015,970, 755,723,1002,989,957,742,710,1021,976,761,729,697,1008,995,963,748,716,982, 950,767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760, 728,696,1007,994,962,747,715,981,949,766,734,702,1013,968,753,721,1000,987, 955,740,708,1019,974,759,727,695,1006,993,961,746,714,980,948,765,733,701, 1012,999,967,752,720,986,954,739,707,1018,973,941,758,726,694,1005,992,960, 745,713,979,947,764,732,700,1011,998,966,751,719,985,953,738,706,1017,972, 757,725,1004,991,959,744,712,1023,978,946,763,731,699,1010] [views:debug,2014-08-19T16:50:26.149,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/695. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:26.149,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",695,pending,0} [ns_server:debug,2014-08-19T16:50:26.307,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 693. Nacking mccouch update. [views:debug,2014-08-19T16:50:26.307,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/693. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:26.307,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",693,pending,0} [ns_server:debug,2014-08-19T16:50:26.307,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,939,756,724,692,1003,990,958,743, 711,1022,977,945,762,730,698,1009,996,964,749,717,983,951,736,704,1015,970, 755,723,1002,989,957,742,710,1021,976,761,729,697,1008,995,963,748,716,982, 950,767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760, 728,696,1007,994,962,747,715,981,949,766,734,702,1013,968,753,721,1000,987, 955,740,708,1019,974,759,727,695,1006,993,961,746,714,980,948,765,733,701, 1012,999,967,752,720,986,954,739,707,1018,973,941,758,726,694,1005,992,960, 745,713,979,947,764,732,700,1011,998,966,751,719,985,953,738,706,1017,972, 757,725,693,1004,991,959,744,712,1023,978,946,763,731,699,1010] [views:debug,2014-08-19T16:50:26.391,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/693. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:26.391,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",693,pending,0} [ns_server:debug,2014-08-19T16:50:26.554,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 691. Nacking mccouch update. [views:debug,2014-08-19T16:50:26.554,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/691. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:26.554,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",691,pending,0} [ns_server:debug,2014-08-19T16:50:26.555,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,965,750,718,984,952,737,705,1016,971,939,756,724,692,1003,990,958,743, 711,1022,977,945,762,730,698,1009,996,964,749,717,983,951,736,704,1015,970, 755,723,691,1002,989,957,742,710,1021,976,761,729,697,1008,995,963,748,716, 982,950,767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943, 760,728,696,1007,994,962,747,715,981,949,766,734,702,1013,968,753,721,1000, 987,955,740,708,1019,974,759,727,695,1006,993,961,746,714,980,948,765,733, 701,1012,999,967,752,720,986,954,739,707,1018,973,941,758,726,694,1005,992, 960,745,713,979,947,764,732,700,1011,998,966,751,719,985,953,738,706,1017, 972,757,725,693,1004,991,959,744,712,1023,978,946,763,731,699,1010] [views:debug,2014-08-19T16:50:26.605,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/691. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:26.605,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",691,pending,0} [ns_server:debug,2014-08-19T16:50:26.671,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 689. Nacking mccouch update. [views:debug,2014-08-19T16:50:26.672,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/689. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:26.672,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",689,pending,0} [ns_server:debug,2014-08-19T16:50:26.672,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,952,737,705,1016,971,939,756,724,692,1003,990,958,743,711,1022, 977,945,762,730,698,1009,996,964,749,717,983,951,736,704,1015,970,755,723, 691,1002,989,957,742,710,1021,976,761,729,697,1008,995,963,748,716,982,950, 767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728, 696,1007,994,962,747,715,981,949,766,734,702,1013,968,753,721,689,1000,987, 955,740,708,1019,974,759,727,695,1006,993,961,746,714,980,948,765,733,701, 1012,999,967,752,720,986,954,739,707,1018,973,941,758,726,694,1005,992,960, 745,713,979,947,764,732,700,1011,998,966,751,719,985,953,738,706,1017,972, 757,725,693,1004,991,959,744,712,1023,978,946,763,731,699,1010,965,718] [views:debug,2014-08-19T16:50:26.706,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/689. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:26.706,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",689,pending,0} [ns_server:debug,2014-08-19T16:50:26.789,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 687. Nacking mccouch update. [views:debug,2014-08-19T16:50:26.789,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/687. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:26.789,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",687,pending,0} [ns_server:debug,2014-08-19T16:50:26.790,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,952,737,705,1016,971,939,756,724,692,1003,990,958,743,711,1022, 977,945,762,730,698,1009,996,964,749,717,983,951,736,704,1015,970,755,723, 691,1002,989,957,742,710,1021,976,761,729,697,1008,995,963,748,716,982,950, 767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728, 696,1007,994,962,747,715,981,949,766,734,702,1013,968,753,721,689,1000,987, 955,740,708,1019,974,759,727,695,1006,993,961,746,714,980,948,765,733,701, 1012,999,967,752,720,986,954,739,707,1018,973,941,758,726,694,1005,992,960, 745,713,979,947,764,732,700,1011,998,966,751,719,687,985,953,738,706,1017, 972,757,725,693,1004,991,959,744,712,1023,978,946,763,731,699,1010,965,718] [views:debug,2014-08-19T16:50:26.832,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/687. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:26.832,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",687,pending,0} [ns_server:debug,2014-08-19T16:50:26.898,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 685. Nacking mccouch update. [views:debug,2014-08-19T16:50:26.898,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/685. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:26.898,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",685,pending,0} [ns_server:debug,2014-08-19T16:50:26.899,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,952,737,705,1016,971,939,756,724,692,1003,990,958,743,711,1022, 977,945,762,730,698,1009,996,964,749,717,685,983,951,736,704,1015,970,755, 723,691,1002,989,957,742,710,1021,976,761,729,697,1008,995,963,748,716,982, 950,767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760, 728,696,1007,994,962,747,715,981,949,766,734,702,1013,968,753,721,689,1000, 987,955,740,708,1019,974,759,727,695,1006,993,961,746,714,980,948,765,733, 701,1012,999,967,752,720,986,954,739,707,1018,973,941,758,726,694,1005,992, 960,745,713,979,947,764,732,700,1011,998,966,751,719,687,985,953,738,706, 1017,972,757,725,693,1004,991,959,744,712,1023,978,946,763,731,699,1010,965, 718] [views:debug,2014-08-19T16:50:26.933,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/685. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:26.933,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",685,pending,0} [ns_server:debug,2014-08-19T16:50:27.016,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 683. Nacking mccouch update. [views:debug,2014-08-19T16:50:27.016,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/683. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:27.016,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",683,pending,0} [ns_server:debug,2014-08-19T16:50:27.016,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,952,737,705,1016,971,939,756,724,692,1003,990,958,743,711,1022, 977,945,762,730,698,1009,996,964,749,717,685,983,951,736,704,1015,970,755, 723,691,1002,989,957,742,710,1021,976,761,729,697,1008,995,963,748,716,982, 950,767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760, 728,696,1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689, 1000,987,955,740,708,1019,974,759,727,695,1006,993,961,746,714,980,948,765, 733,701,1012,999,967,752,720,986,954,739,707,1018,973,941,758,726,694,1005, 992,960,745,713,979,947,764,732,700,1011,998,966,751,719,687,985,953,738,706, 1017,972,757,725,693,1004,991,959,744,712,1023,978,946,763,731,699,1010,965, 718] [views:debug,2014-08-19T16:50:27.058,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/683. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:27.058,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",683,pending,0} [ns_server:debug,2014-08-19T16:50:27.217,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 944. Nacking mccouch update. [views:debug,2014-08-19T16:50:27.218,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/944. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:27.218,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",944,replica,0} [ns_server:debug,2014-08-19T16:50:27.218,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,952,737,705,1016,971,939,756,724,692,1003,990,958,743,711,1022, 977,945,762,730,698,1009,996,964,749,717,685,983,951,736,704,1015,970,755, 723,691,1002,989,957,742,710,1021,976,944,761,729,697,1008,995,963,748,716, 982,950,767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943, 760,728,696,1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721, 689,1000,987,955,740,708,1019,974,759,727,695,1006,993,961,746,714,980,948, 765,733,701,1012,999,967,752,720,986,954,739,707,1018,973,941,758,726,694, 1005,992,960,745,713,979,947,764,732,700,1011,998,966,751,719,687,985,953, 738,706,1017,972,757,725,693,1004,991,959,744,712,1023,978,946,763,731,699, 1010,965,718] [views:debug,2014-08-19T16:50:27.285,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/944. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:27.285,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",944,replica,0} [ns_server:debug,2014-08-19T16:50:27.418,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 942. Nacking mccouch update. [views:debug,2014-08-19T16:50:27.418,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/942. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:27.418,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",942,replica,0} [ns_server:debug,2014-08-19T16:50:27.419,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,971,939,756,724,692,1003,990,958,743,711,1022,977,945,762, 730,698,1009,996,964,749,717,685,983,951,736,704,1015,970,755,723,691,1002, 989,957,742,710,1021,976,944,761,729,697,1008,995,963,748,716,982,950,767, 735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728,696, 1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000,987, 955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,980,948,765,733, 701,1012,999,967,752,720,986,954,739,707,1018,973,941,758,726,694,1005,992, 960,745,713,979,947,764,732,700,1011,998,966,751,719,687,985,953,738,706, 1017,972,757,725,693,1004,991,959,744,712,1023,978,946,763,731,699,1010,965, 718,952,705,1016] [views:debug,2014-08-19T16:50:27.486,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/942. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:27.486,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",942,replica,0} [ns_server:debug,2014-08-19T16:50:27.619,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 940. Nacking mccouch update. 
[views:debug,2014-08-19T16:50:27.619,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/940. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:27.619,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",940,replica,0} [ns_server:debug,2014-08-19T16:50:27.620,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,971,939,756,724,692,1003,990,958,743,711,1022,977,945,762, 730,698,1009,996,964,749,717,685,983,951,736,704,1015,970,755,723,691,1002, 989,957,742,710,1021,976,944,761,729,697,1008,995,963,748,716,982,950,767, 735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728,696, 1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000,987, 955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,980,948,765,733, 701,1012,999,967,752,720,986,954,739,707,1018,973,941,758,726,694,1005,992, 960,745,713,979,947,764,732,700,1011,998,966,751,719,687,985,953,738,706, 1017,972,940,757,725,693,1004,991,959,744,712,1023,978,946,763,731,699,1010, 965,718,952,705,1016] [views:debug,2014-08-19T16:50:27.687,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/940. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:27.687,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",940,replica,0} [ns_server:debug,2014-08-19T16:50:27.828,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 938. Nacking mccouch update. [views:debug,2014-08-19T16:50:27.828,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/938. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:27.829,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",938,replica,0} [ns_server:debug,2014-08-19T16:50:27.829,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,971,939,756,724,692,1003,990,958,743,711,1022,977,945,762, 730,698,1009,996,964,749,717,685,983,951,736,704,1015,970,938,755,723,691, 1002,989,957,742,710,1021,976,944,761,729,697,1008,995,963,748,716,982,950, 767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728, 696,1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000, 987,955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,980,948,765, 733,701,1012,999,967,752,720,986,954,739,707,1018,973,941,758,726,694,1005, 992,960,745,713,979,947,764,732,700,1011,998,966,751,719,687,985,953,738,706, 1017,972,940,757,725,693,1004,991,959,744,712,1023,978,946,763,731,699,1010, 965,718,952,705,1016] [views:debug,2014-08-19T16:50:27.896,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/938. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:27.896,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",938,replica,0} [ns_server:debug,2014-08-19T16:50:28.016,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 688. Nacking mccouch update. [views:debug,2014-08-19T16:50:28.016,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/688. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:28.016,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",688,pending,0} [ns_server:debug,2014-08-19T16:50:28.017,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,984,737,971,939,756,724,692,1003,990,958,743,711,1022,977,945,762, 730,698,1009,996,964,749,717,685,983,951,736,704,1015,970,938,755,723,691, 1002,989,957,742,710,1021,976,944,761,729,697,1008,995,963,748,716,982,950, 767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728, 696,1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000, 987,955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,980,948,765, 733,701,1012,999,967,752,720,688,986,954,739,707,1018,973,941,758,726,694, 1005,992,960,745,713,979,947,764,732,700,1011,998,966,751,719,687,985,953, 738,706,1017,972,940,757,725,693,1004,991,959,744,712,1023,978,946,763,731, 699,1010,965,718,952,705,1016] [views:debug,2014-08-19T16:50:28.058,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/688. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:28.058,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",688,pending,0} [ns_server:debug,2014-08-19T16:50:28.142,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 686. Nacking mccouch update. [views:debug,2014-08-19T16:50:28.142,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/686. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:28.142,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",686,pending,0} [ns_server:debug,2014-08-19T16:50:28.142,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,971,939,756,724,692,1003,990,958,743,711,1022,977,945, 762,730,698,1009,996,964,749,717,685,983,951,736,704,1015,970,938,755,723, 691,1002,989,957,742,710,1021,976,944,761,729,697,1008,995,963,748,716,982, 950,767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760, 728,696,1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689, 1000,987,955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,980,948, 765,733,701,1012,999,967,752,720,688,986,954,739,707,1018,973,941,758,726, 694,1005,992,960,745,713,979,947,764,732,700,1011,998,966,751,719,687,985, 953,738,706,1017,972,940,757,725,693,1004,991,959,744,712,1023,978,946,763, 731,699,1010,965,718,952,705,1016] [views:debug,2014-08-19T16:50:28.177,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/686. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:28.177,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",686,pending,0} [ns_server:debug,2014-08-19T16:50:28.252,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 684. Nacking mccouch update. [views:debug,2014-08-19T16:50:28.252,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/684. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:28.253,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",684,pending,0} [ns_server:debug,2014-08-19T16:50:28.253,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,971,724,990,958,743,711,1022,977,945,762,730,698,1009, 996,964,749,717,685,983,951,736,704,1015,970,938,755,723,691,1002,989,957, 742,710,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767,735, 703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728,696,1007, 994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000,987,955, 740,708,1019,974,942,759,727,695,1006,993,961,746,714,980,948,765,733,701, 1012,999,967,752,720,688,986,954,739,707,1018,973,941,758,726,694,1005,992, 960,745,713,979,947,764,732,700,1011,998,966,751,719,687,985,953,738,706, 1017,972,940,757,725,693,1004,991,959,744,712,1023,978,946,763,731,699,1010, 965,718,952,705,1016,939,756,692,1003] [views:debug,2014-08-19T16:50:28.286,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/684. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:28.287,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",684,pending,0} [rebalance:debug,2014-08-19T16:50:28.288,ns_1@10.242.238.90:<0.22823.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:28.288,ns_1@10.242.238.90:<0.22849.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:28.288,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22823.0> (ok) [ns_server:debug,2014-08-19T16:50:28.288,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22849.0> (ok) [rebalance:debug,2014-08-19T16:50:28.398,ns_1@10.242.238.90:<0.22792.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:28.398,ns_1@10.242.238.90:<0.22787.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:28.398,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22792.0> (ok) [ns_server:debug,2014-08-19T16:50:28.398,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22787.0> (ok) [rebalance:debug,2014-08-19T16:50:28.504,ns_1@10.242.238.90:<0.23287.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:28.504,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23287.0> (ok) [rebalance:debug,2014-08-19T16:50:28.504,ns_1@10.242.238.90:<0.23312.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:28.504,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23312.0> (ok) [rebalance:debug,2014-08-19T16:50:28.588,ns_1@10.242.238.90:<0.23236.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:28.588,ns_1@10.242.238.90:<0.23269.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:28.588,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23236.0> (ok) [ns_server:debug,2014-08-19T16:50:28.588,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23269.0> (ok) [rebalance:debug,2014-08-19T16:50:28.713,ns_1@10.242.238.90:<0.23200.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:28.713,ns_1@10.242.238.90:<0.23225.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:28.713,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23200.0> (ok) [ns_server:debug,2014-08-19T16:50:28.713,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23225.0> (ok) [rebalance:debug,2014-08-19T16:50:28.856,ns_1@10.242.238.90:<0.23136.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:28.856,ns_1@10.242.238.90:<0.23175.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:28.856,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23136.0> (ok) [ns_server:debug,2014-08-19T16:50:28.856,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from 
subprocess: <0.23175.0> (ok) [rebalance:debug,2014-08-19T16:50:28.981,ns_1@10.242.238.90:<0.23070.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:28.981,ns_1@10.242.238.90:<0.23095.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:28.981,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23070.0> (ok) [ns_server:debug,2014-08-19T16:50:28.981,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23095.0> (ok) [rebalance:debug,2014-08-19T16:50:29.131,ns_1@10.242.238.90:<0.23019.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:29.131,ns_1@10.242.238.90:<0.23045.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:29.131,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23019.0> (ok) [ns_server:debug,2014-08-19T16:50:29.132,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23045.0> (ok) [rebalance:debug,2014-08-19T16:50:29.282,ns_1@10.242.238.90:<0.22994.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:29.282,ns_1@10.242.238.90:<0.22969.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:29.282,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22994.0> (ok) [ns_server:debug,2014-08-19T16:50:29.282,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22969.0> (ok) [rebalance:debug,2014-08-19T16:50:29.407,ns_1@10.242.238.90:<0.22933.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:29.407,ns_1@10.242.238.90:<0.22958.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:29.407,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22933.0> (ok) [ns_server:debug,2014-08-19T16:50:29.407,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22958.0> (ok) [rebalance:debug,2014-08-19T16:50:29.533,ns_1@10.242.238.90:<0.23331.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:29.533,ns_1@10.242.238.90:<0.22894.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:29.533,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23331.0> (ok) [ns_server:debug,2014-08-19T16:50:29.533,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22894.0> (ok) [rebalance:debug,2014-08-19T16:50:29.663,ns_1@10.242.238.90:<0.23306.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:29.663,ns_1@10.242.238.90:<0.23281.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:29.663,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23306.0> (ok) [ns_server:debug,2014-08-19T16:50:29.663,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23281.0> (ok) [rebalance:debug,2014-08-19T16:50:29.730,ns_1@10.242.238.90:<0.23255.0>:janitor_agent:handle_call:795]Done 
[rebalance:debug,2014-08-19T16:50:29.730,ns_1@10.242.238.90:<0.23230.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:29.730,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23255.0> (ok) [ns_server:debug,2014-08-19T16:50:29.730,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23230.0> (ok) [rebalance:debug,2014-08-19T16:50:29.797,ns_1@10.242.238.90:<0.23205.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:29.797,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23205.0> (ok) [rebalance:debug,2014-08-19T16:50:29.798,ns_1@10.242.238.90:<0.23180.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:29.798,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23180.0> (ok) [rebalance:debug,2014-08-19T16:50:29.881,ns_1@10.242.238.90:<0.23128.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:29.881,ns_1@10.242.238.90:<0.23155.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:29.881,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23128.0> (ok) [ns_server:debug,2014-08-19T16:50:29.881,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23155.0> (ok) [rebalance:debug,2014-08-19T16:50:29.990,ns_1@10.242.238.90:<0.23050.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:29.990,ns_1@10.242.238.90:<0.23075.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:29.990,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23050.0> (ok) [ns_server:debug,2014-08-19T16:50:29.990,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23075.0> (ok) [rebalance:debug,2014-08-19T16:50:30.090,ns_1@10.242.238.90:<0.23013.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:30.090,ns_1@10.242.238.90:<0.23025.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:30.090,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23013.0> (ok) [ns_server:debug,2014-08-19T16:50:30.090,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23025.0> (ok) [rebalance:debug,2014-08-19T16:50:30.174,ns_1@10.242.238.90:<0.22963.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:30.174,ns_1@10.242.238.90:<0.22988.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:30.174,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22963.0> (ok) [ns_server:debug,2014-08-19T16:50:30.174,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22988.0> (ok) [rebalance:debug,2014-08-19T16:50:30.283,ns_1@10.242.238.90:<0.22913.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:30.283,ns_1@10.242.238.90:<0.22938.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:50:30.283,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22913.0> (ok) [ns_server:debug,2014-08-19T16:50:30.283,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22938.0> (ok) [rebalance:debug,2014-08-19T16:50:30.417,ns_1@10.242.238.90:<0.22874.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:30.417,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.22874.0> (ok) [rebalance:debug,2014-08-19T16:50:32.166,ns_1@10.242.238.90:<0.23665.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 696 [rebalance:debug,2014-08-19T16:50:32.166,ns_1@10.242.238.90:<0.23666.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 697 [rebalance:debug,2014-08-19T16:50:32.167,ns_1@10.242.238.90:<0.23665.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.167,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23665.0> (ok) [rebalance:debug,2014-08-19T16:50:32.167,ns_1@10.242.238.90:<0.23666.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.167,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23666.0> (ok) [rebalance:debug,2014-08-19T16:50:32.250,ns_1@10.242.238.90:<0.23671.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 699 [rebalance:debug,2014-08-19T16:50:32.250,ns_1@10.242.238.90:<0.23674.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 698 [rebalance:debug,2014-08-19T16:50:32.251,ns_1@10.242.238.90:<0.23674.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.251,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23674.0> (ok) [rebalance:debug,2014-08-19T16:50:32.251,ns_1@10.242.238.90:<0.23671.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.251,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23671.0> (ok) [rebalance:debug,2014-08-19T16:50:32.350,ns_1@10.242.238.90:<0.23677.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 700 [rebalance:debug,2014-08-19T16:50:32.350,ns_1@10.242.238.90:<0.23680.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 701 [rebalance:debug,2014-08-19T16:50:32.351,ns_1@10.242.238.90:<0.23677.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.351,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23677.0> (ok) [rebalance:debug,2014-08-19T16:50:32.352,ns_1@10.242.238.90:<0.23680.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.352,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23680.0> (ok) [rebalance:debug,2014-08-19T16:50:32.459,ns_1@10.242.238.90:<0.23683.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 703 
[rebalance:debug,2014-08-19T16:50:32.459,ns_1@10.242.238.90:<0.23686.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 702 [rebalance:debug,2014-08-19T16:50:32.460,ns_1@10.242.238.90:<0.23686.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.460,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23686.0> (ok) [rebalance:debug,2014-08-19T16:50:32.461,ns_1@10.242.238.90:<0.23683.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.461,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23683.0> (ok) [rebalance:debug,2014-08-19T16:50:32.569,ns_1@10.242.238.90:<0.23689.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 952 [rebalance:debug,2014-08-19T16:50:32.569,ns_1@10.242.238.90:<0.23692.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 953 [rebalance:debug,2014-08-19T16:50:32.570,ns_1@10.242.238.90:<0.23689.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.570,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23689.0> (ok) [rebalance:debug,2014-08-19T16:50:32.570,ns_1@10.242.238.90:<0.23692.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.570,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23692.0> (ok) [rebalance:debug,2014-08-19T16:50:32.686,ns_1@10.242.238.90:<0.23695.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 954 [rebalance:debug,2014-08-19T16:50:32.686,ns_1@10.242.238.90:<0.23698.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 955 [rebalance:debug,2014-08-19T16:50:32.687,ns_1@10.242.238.90:<0.23695.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.687,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23695.0> (ok) [rebalance:debug,2014-08-19T16:50:32.687,ns_1@10.242.238.90:<0.23698.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.687,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23698.0> (ok) [rebalance:debug,2014-08-19T16:50:32.802,ns_1@10.242.238.90:<0.23702.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 956 [rebalance:debug,2014-08-19T16:50:32.802,ns_1@10.242.238.90:<0.23703.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 957 [rebalance:debug,2014-08-19T16:50:32.803,ns_1@10.242.238.90:<0.23702.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.803,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23702.0> (ok) [rebalance:debug,2014-08-19T16:50:32.803,ns_1@10.242.238.90:<0.23703.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.804,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23703.0> (ok) [rebalance:debug,2014-08-19T16:50:32.912,ns_1@10.242.238.90:<0.23708.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 959 
[rebalance:debug,2014-08-19T16:50:32.913,ns_1@10.242.238.90:<0.23711.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 958 [rebalance:debug,2014-08-19T16:50:32.914,ns_1@10.242.238.90:<0.23711.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.914,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23711.0> (ok) [rebalance:debug,2014-08-19T16:50:32.914,ns_1@10.242.238.90:<0.23708.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:32.914,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23708.0> (ok) [ns_server:debug,2014-08-19T16:50:33.571,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:33.574,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.574,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3281 us [ns_server:debug,2014-08-19T16:50:33.574,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.575,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{440, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:33.586,ns_1@10.242.238.90:<0.23732.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 684 [rebalance:debug,2014-08-19T16:50:33.587,ns_1@10.242.238.90:<0.23732.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:33.588,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23732.0> (ok) [rebalance:debug,2014-08-19T16:50:33.657,ns_1@10.242.238.90:<0.23735.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 683 [rebalance:debug,2014-08-19T16:50:33.657,ns_1@10.242.238.90:<0.23738.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 686 [ns_server:debug,2014-08-19T16:50:33.658,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:33.660,ns_1@10.242.238.90:<0.23738.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:33.660,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23738.0> (ok) [rebalance:debug,2014-08-19T16:50:33.660,ns_1@10.242.238.90:<0.23735.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:33.660,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23735.0> (ok) 
[ns_server:debug,2014-08-19T16:50:33.661,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.661,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1804 us [ns_server:debug,2014-08-19T16:50:33.662,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.662,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{442, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:33.682,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:33.689,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6627 us [ns_server:debug,2014-08-19T16:50:33.689,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.689,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.690,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{447, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:33.711,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:33.715,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.715,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4328 us [ns_server:debug,2014-08-19T16:50:33.716,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.717,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{446, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:33.739,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:33.742,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2700 us [ns_server:debug,2014-08-19T16:50:33.742,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.742,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.743,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{445, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:33.761,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:33.763,ns_1@10.242.238.90:<0.23745.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 688 [rebalance:debug,2014-08-19T16:50:33.763,ns_1@10.242.238.90:<0.23748.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 685 [ns_server:debug,2014-08-19T16:50:33.765,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.765,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3322 us [rebalance:debug,2014-08-19T16:50:33.765,ns_1@10.242.238.90:<0.23748.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:33.765,ns_1@10.242.238.90:<0.23745.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:33.765,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23748.0> (ok) [ns_server:debug,2014-08-19T16:50:33.765,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23745.0> (ok) [ns_server:debug,2014-08-19T16:50:33.765,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.766,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{444, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, 
{servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:33.791,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:33.794,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2921 us [ns_server:debug,2014-08-19T16:50:33.794,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.794,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.795,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{443, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:33.863,ns_1@10.242.238.90:<0.23752.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 687 [rebalance:debug,2014-08-19T16:50:33.863,ns_1@10.242.238.90:<0.23753.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 690 [rebalance:debug,2014-08-19T16:50:33.864,ns_1@10.242.238.90:<0.23753.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:33.864,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23753.0> (ok) [rebalance:debug,2014-08-19T16:50:33.864,ns_1@10.242.238.90:<0.23752.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:33.865,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23752.0> (ok) [rebalance:debug,2014-08-19T16:50:33.954,ns_1@10.242.238.90:<0.23758.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 689 [rebalance:debug,2014-08-19T16:50:33.955,ns_1@10.242.238.90:<0.23761.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 692 [rebalance:debug,2014-08-19T16:50:33.956,ns_1@10.242.238.90:<0.23761.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:33.956,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23761.0> (ok) [rebalance:debug,2014-08-19T16:50:33.956,ns_1@10.242.238.90:<0.23758.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:33.956,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23758.0> (ok) [rebalance:debug,2014-08-19T16:50:34.055,ns_1@10.242.238.90:<0.23764.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 691 [rebalance:debug,2014-08-19T16:50:34.055,ns_1@10.242.238.90:<0.23767.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 694 
[rebalance:debug,2014-08-19T16:50:34.056,ns_1@10.242.238.90:<0.23767.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:34.056,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23767.0> (ok) [rebalance:debug,2014-08-19T16:50:34.057,ns_1@10.242.238.90:<0.23764.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:34.057,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23764.0> (ok) [ns_server:debug,2014-08-19T16:50:34.136,ns_1@10.242.238.90:<0.23771.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 696) [ns_server:debug,2014-08-19T16:50:34.136,ns_1@10.242.238.90:<0.23771.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:34.136,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23770.0> (ok) [rebalance:debug,2014-08-19T16:50:34.137,ns_1@10.242.238.90:<0.22966.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:34.137,ns_1@10.242.238.90:<0.22966.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:34.137,ns_1@10.242.238.90:<0.23772.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:34.137,ns_1@10.242.238.90:<0.23772.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:34.137,ns_1@10.242.238.90:<0.22966.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:50:34.138,ns_1@10.242.238.90:<0.23773.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 693 [rebalance:debug,2014-08-19T16:50:34.139,ns_1@10.242.238.90:<0.23773.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:34.139,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23773.0> (ok) [ns_server:info,2014-08-19T16:50:34.184,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 696 state to active [ns_server:debug,2014-08-19T16:50:34.186,ns_1@10.242.238.90:<0.23777.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 698) [ns_server:debug,2014-08-19T16:50:34.186,ns_1@10.242.238.90:<0.23777.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:34.186,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23776.0> (ok) [rebalance:debug,2014-08-19T16:50:34.187,ns_1@10.242.238.90:<0.22916.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:34.187,ns_1@10.242.238.90:<0.22916.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:34.187,ns_1@10.242.238.90:<0.23778.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:34.187,ns_1@10.242.238.90:<0.23778.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:34.188,ns_1@10.242.238.90:<0.22916.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
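Annotation: the ebucketmigrator_srv shutdowns above follow a fixed pattern: the dying migrator logs "Dying with reason: shutdown", a helper process pushes an opaque confirmation message downstream, and the migrator only exits after "Got close ack!". A sketch (assuming the log is available as one string and entries keep the [level,timestamp,node:<pid>:module:fun:line] prefix shown here) that pairs each shutdown with its close ack by pid and reports how long the handshake took:

import re
from datetime import datetime

ENTRY_RE = re.compile(
    r"\[[^,]+,(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+),"
    r"[^:]+:(<[\d.]+>):ebucketmigrator_srv:[^\]]+\]"
    r"(Dying with reason: shutdown|Got close ack!)")

def ts(s):
    return datetime.strptime(s, "%Y-%m-%dT%H:%M:%S.%f")

def handshake_times(log_text):
    started = {}                      # pid -> time of "Dying with reason"
    for stamp, pid, msg in ENTRY_RE.findall(log_text):
        if msg.startswith("Dying"):
            started[pid] = ts(stamp)
        elif pid in started:
            yield pid, (ts(stamp) - started.pop(pid)).total_seconds() * 1000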
[rebalance:debug,2014-08-19T16:50:34.188,ns_1@10.242.238.90:<0.23779.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 695 [ns_server:debug,2014-08-19T16:50:34.212,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:34.215,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.215,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3230 us [ns_server:debug,2014-08-19T16:50:34.216,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.216,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{696, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:34.229,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 698 state to active [ns_server:debug,2014-08-19T16:50:34.249,ns_1@10.242.238.90:<0.23785.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 702) [ns_server:debug,2014-08-19T16:50:34.249,ns_1@10.242.238.90:<0.23785.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:34.249,ns_1@10.242.238.90:<0.23787.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 699) [ns_server:debug,2014-08-19T16:50:34.249,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23783.0> (ok) [ns_server:debug,2014-08-19T16:50:34.249,ns_1@10.242.238.90:<0.23787.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:34.249,ns_1@10.242.238.90:<0.23788.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 701) [ns_server:debug,2014-08-19T16:50:34.249,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23784.0> (ok) [ns_server:debug,2014-08-19T16:50:34.249,ns_1@10.242.238.90:<0.23788.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:34.249,ns_1@10.242.238.90:<0.23791.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 700) [ns_server:debug,2014-08-19T16:50:34.249,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23786.0> (ok) [ns_server:debug,2014-08-19T16:50:34.249,ns_1@10.242.238.90:<0.23791.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.23792.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 697) 
[rebalance:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.22795.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23789.0> (ok) [ns_server:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.23792.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.22795.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23790.0> (ok) [ns_server:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.23793.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.22877.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.23793.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.22795.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.22826.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.22877.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.23794.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.22852.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.23794.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.22826.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:34.250,ns_1@10.242.238.90:<0.23795.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:34.251,ns_1@10.242.238.90:<0.23795.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:34.251,ns_1@10.242.238.90:<0.22852.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:50:34.251,ns_1@10.242.238.90:<0.22941.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:34.251,ns_1@10.242.238.90:<0.23796.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:50:34.251,ns_1@10.242.238.90:<0.22877.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:50:34.251,ns_1@10.242.238.90:<0.22826.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:34.251,ns_1@10.242.238.90:<0.23796.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:34.251,ns_1@10.242.238.90:<0.22941.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:34.251,ns_1@10.242.238.90:<0.23797.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:34.251,ns_1@10.242.238.90:<0.23797.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:50:34.251,ns_1@10.242.238.90:<0.23798.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 938 [rebalance:info,2014-08-19T16:50:34.251,ns_1@10.242.238.90:<0.22852.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:50:34.251,ns_1@10.242.238.90:<0.22941.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:34.253,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:34.260,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.260,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7012 us [ns_server:debug,2014-08-19T16:50:34.261,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.261,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{698, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:34.275,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/696. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:34.276,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",696,active,1} [rebalance:debug,2014-08-19T16:50:34.276,ns_1@10.242.238.90:<0.23798.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:34.276,ns_1@10.242.238.90:<0.23779.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:34.276,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23798.0> (ok) [ns_server:debug,2014-08-19T16:50:34.277,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23779.0> (ok) [ns_server:info,2014-08-19T16:50:34.299,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 702 state to active [ns_server:info,2014-08-19T16:50:34.313,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 700 state to active [ns_server:debug,2014-08-19T16:50:34.318,ns_1@10.242.238.90:<0.23803.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 703) [ns_server:debug,2014-08-19T16:50:34.318,ns_1@10.242.238.90:<0.23803.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:34.318,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23802.0> (ok) [rebalance:debug,2014-08-19T16:50:34.319,ns_1@10.242.238.90:<0.22765.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:34.319,ns_1@10.242.238.90:<0.22765.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:34.319,ns_1@10.242.238.90:<0.23804.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:34.319,ns_1@10.242.238.90:<0.23804.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:34.319,ns_1@10.242.238.90:<0.22765.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
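Annotation: the activation of a vbucket shows up twice above: ns_memcached logs "Changed vbucket N state to active", and a little later capi_set_view_manager observes the same flip via a mc_couch set_vbucket event. A sketch that measures the lag between the two sightings; the regexes and function names are illustrative only, and the caller is assumed to supply the log as a single string.

import re
from datetime import datetime

STAMP = r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+)"
# memcached flipping the vbucket state ...
CHANGED = re.compile(r"\[ns_server:info," + STAMP +
                     r",[^\]]*ns_memcached:do_handle_call:\d+\]"
                     r"Changed vbucket (\d+) state to (\w+)")
# ... and the view manager seeing the matching set_vbucket event.
EVENT = re.compile(r"\[views:debug," + STAMP +
                   r",[^\]]*handle_mc_couch_event:\d+\]"
                   r"Got set_vbucket event for \w+/(\d+)\.\s*Updated state: (\w+)")

def ts(s):
    return datetime.strptime(s, "%Y-%m-%dT%H:%M:%S.%f")

def view_lag_ms(log_text):
    changed = {(int(vb), state): ts(stamp)
               for stamp, vb, state in CHANGED.findall(log_text)}
    for stamp, vb, state in EVENT.findall(log_text):
        key = (int(vb), state)
        if key in changed:
            yield int(vb), state, (ts(stamp) - changed[key]).total_seconds() * 1000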
[rebalance:debug,2014-08-19T16:50:34.320,ns_1@10.242.238.90:<0.23805.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 940 [ns_server:debug,2014-08-19T16:50:34.325,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:34.328,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.329,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4072 us [ns_server:debug,2014-08-19T16:50:34.329,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.330,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{702, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:34.332,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 701 state to active [views:debug,2014-08-19T16:50:34.335,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/698. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:34.335,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",698,active,1} [ns_server:info,2014-08-19T16:50:34.346,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 699 state to active [ns_server:info,2014-08-19T16:50:34.360,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 697 state to active [ns_server:debug,2014-08-19T16:50:34.367,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:34.368,ns_1@10.242.238.90:<0.23809.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 942 [rebalance:debug,2014-08-19T16:50:34.368,ns_1@10.242.238.90:<0.23812.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 939 [ns_server:debug,2014-08-19T16:50:34.370,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.370,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3279 us [ns_server:debug,2014-08-19T16:50:34.371,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.371,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{700, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, 
{uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:34.382,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 703 state to active [views:debug,2014-08-19T16:50:34.393,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/702. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:34.394,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",702,active,1} [ns_server:debug,2014-08-19T16:50:34.396,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:34.400,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.400,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3410 us [ns_server:debug,2014-08-19T16:50:34.401,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.401,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{701, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:34.420,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:34.423,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2793 us [ns_server:debug,2014-08-19T16:50:34.423,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.424,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.424,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{699, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
[views:debug,2014-08-19T16:50:34.444,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/700. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:34.444,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",700,active,1} [ns_server:debug,2014-08-19T16:50:34.450,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:34.454,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.454,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3687 us [ns_server:debug,2014-08-19T16:50:34.455,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.455,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{697, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:34.476,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:34.483,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7180 us [ns_server:debug,2014-08-19T16:50:34.483,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.484,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.485,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{703, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:34.500,ns_1@10.242.238.90:<0.23819.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 941 [rebalance:debug,2014-08-19T16:50:34.500,ns_1@10.242.238.90:<0.23820.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 944 [views:debug,2014-08-19T16:50:34.511,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/703. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:34.511,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",703,active,1} [views:debug,2014-08-19T16:50:34.561,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/701. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:34.561,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",701,active,1} [rebalance:debug,2014-08-19T16:50:34.626,ns_1@10.242.238.90:<0.23826.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 946 [rebalance:debug,2014-08-19T16:50:34.626,ns_1@10.242.238.90:<0.23829.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 943 [views:debug,2014-08-19T16:50:34.628,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/699. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:34.628,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",699,active,1} [views:debug,2014-08-19T16:50:34.695,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/697. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:34.695,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",697,active,1} [rebalance:debug,2014-08-19T16:50:34.696,ns_1@10.242.238.90:<0.23805.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:34.696,ns_1@10.242.238.90:<0.23812.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:34.696,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23805.0> (ok) [ns_server:debug,2014-08-19T16:50:34.696,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23812.0> (ok) [rebalance:debug,2014-08-19T16:50:34.697,ns_1@10.242.238.90:<0.23819.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:34.697,ns_1@10.242.238.90:<0.23820.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:34.697,ns_1@10.242.238.90:<0.23826.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:34.697,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23819.0> (ok) [ns_server:debug,2014-08-19T16:50:34.697,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23820.0> (ok) [ns_server:debug,2014-08-19T16:50:34.697,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23826.0> (ok) [rebalance:debug,2014-08-19T16:50:34.697,ns_1@10.242.238.90:<0.23809.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:34.697,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23809.0> (ok) [rebalance:debug,2014-08-19T16:50:34.697,ns_1@10.242.238.90:<0.23829.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:50:34.698,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23829.0> (ok) [rebalance:debug,2014-08-19T16:50:34.759,ns_1@10.242.238.90:<0.23832.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 948 [rebalance:debug,2014-08-19T16:50:34.760,ns_1@10.242.238.90:<0.23835.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 945 [rebalance:debug,2014-08-19T16:50:34.761,ns_1@10.242.238.90:<0.23832.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:34.761,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23832.0> (ok) [rebalance:debug,2014-08-19T16:50:34.761,ns_1@10.242.238.90:<0.23835.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:34.761,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23835.0> (ok) [rebalance:debug,2014-08-19T16:50:34.894,ns_1@10.242.238.90:<0.23838.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 947 [rebalance:debug,2014-08-19T16:50:34.894,ns_1@10.242.238.90:<0.23841.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 950 [rebalance:debug,2014-08-19T16:50:34.895,ns_1@10.242.238.90:<0.23841.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:34.895,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23841.0> (ok) [rebalance:debug,2014-08-19T16:50:34.895,ns_1@10.242.238.90:<0.23838.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:34.895,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23838.0> (ok) [rebalance:debug,2014-08-19T16:50:35.019,ns_1@10.242.238.90:<0.23844.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 949 [rebalance:debug,2014-08-19T16:50:35.021,ns_1@10.242.238.90:<0.23844.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:35.021,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23844.0> (ok) [rebalance:debug,2014-08-19T16:50:35.062,ns_1@10.242.238.90:<0.22961.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.062,ns_1@10.242.238.90:<0.22961.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.062,ns_1@10.242.238.90:<0.23847.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.062,ns_1@10.242.238.90:<0.23847.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.063,ns_1@10.242.238.90:<0.22961.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
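Annotation: the janitor_agent entries above come in pairs per worker pid: "Going to wait for persistence of checkpoint 1 in vbucket N" followed by "Done" (and a matching "Got done message from subprocess" in the parent). A sketch that pairs them by pid and reports how long each persistence wait took; regexes and names are assumptions made for this note.

import re
from datetime import datetime

STAMP = r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+)"
PID = r"(<[\d.]+>)"
WAIT = re.compile(r"\[rebalance:debug," + STAMP + r",[^:]+:" + PID +
                  r":janitor_agent:handle_call:\d+\]"
                  r"Going to wait for persistence of checkpoint \d+ in vbucket (\d+)")
DONE = re.compile(r"\[rebalance:debug," + STAMP + r",[^:]+:" + PID +
                  r":janitor_agent:handle_call:\d+\]Done")

def ts(s):
    return datetime.strptime(s, "%Y-%m-%dT%H:%M:%S.%f")

def persistence_waits(log_text):
    # Each wait is served by a fresh subprocess pid, so the pid is a safe key.
    pending = {m.group(2): (int(m.group(3)), ts(m.group(1)))
               for m in WAIT.finditer(log_text)}
    for m in DONE.finditer(log_text):
        if m.group(2) in pending:
            vb, started = pending.pop(m.group(2))
            yield vb, (ts(m.group(1)) - started).total_seconds() * 1000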
[ns_server:info,2014-08-19T16:50:35.066,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 952 state to replica [ns_server:info,2014-08-19T16:50:35.066,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [952,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([952], []) [ns_server:debug,2014-08-19T16:50:35.068,ns_1@10.242.238.90:<0.23848.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [952,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980, 981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.2827>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[952,960,961,962,963,964,965,966,967,968,969,970,971,972,973, 974,975,976,977,978,979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:35.068,ns_1@10.242.238.90:<0.23848.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.22762.0> [ns_server:info,2014-08-19T16:50:35.068,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:35.087,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{952,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:35.088,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:35.089,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:35.089,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
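Annotation: each tap_replication_manager change_vbucket_filter entry above extends the replication stream from 'ns_1@10.242.238.91' by one newly built replica vbucket; the trailing tuple such as "([952], [])" is the added/removed delta. A sketch extracting the source node, the delta, and the size of the resulting filter from these entries (regex and names are illustrative, not ns_server code):

import re

CHANGE = re.compile(
    r"Going to change replication from '([^']+)' to have\s*"
    r"\[([\d,\s]*)\]\s*\(\[([\d,\s]*)\],\s*\[([\d,\s]*)\]\)")

def nums(text):
    # "952,960,\n 961" -> [952, 960, 961]
    return [int(x) for x in text.replace(",", " ").split()]

def replication_changes(log_text):
    # Yields (source_node, added_vbuckets, removed_vbuckets, filter_size).
    for src, full, added, removed in CHANGE.findall(log_text):
        yield src, nums(added), nums(removed), len(nums(full))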
[ns_server:debug,2014-08-19T16:50:35.089,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:35.089,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:35.089,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.089,ns_1@10.242.238.90:<0.23850.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.089,ns_1@10.242.238.90:<0.23850.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.089,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.090,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:35.090,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:35.090,ns_1@10.242.238.90:<0.22762.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:35.090,ns_1@10.242.238.90:<0.23848.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.22762.0> [ns_server:debug,2014-08-19T16:50:35.090,ns_1@10.242.238.90:<0.23848.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:35.090,ns_1@10.242.238.90:<0.23852.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:35.090,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.22762.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.22763.0>,<<"cut off">>,<<"cut off">>,[],196,false,false,0, {1408,452635,89058}, completed, {<0.23848.0>,#Ref<0.0.1.2840>}, <<"replication_ns_1@10.242.238.90">>,<0.22762.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:35.091,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.23848.0>,{#Ref<0.0.1.2829>,<0.23852.0>}} [rebalance:debug,2014-08-19T16:50:35.091,ns_1@10.242.238.90:<0.23853.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 951 [error_logger:info,2014-08-19T16:50:35.091,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.23852.0>}, {name, {new_child_id, [952,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980, 981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [952,960,961,962,963,964,965,966,967,968, 969,970,971,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:50:35.093,ns_1@10.242.238.90:<0.23853.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:35.093,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23853.0> (ok) [ns_server:debug,2014-08-19T16:50:35.097,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.100,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.100,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3483 us [ns_server:debug,2014-08-19T16:50:35.101,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.101,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{952, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.102,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[952,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974, 975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005, 
1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:35.103,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23857.0> [rebalance:debug,2014-08-19T16:50:35.112,ns_1@10.242.238.90:<0.22897.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.113,ns_1@10.242.238.90:<0.22897.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.113,ns_1@10.242.238.90:<0.23858.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.113,ns_1@10.242.238.90:<0.23858.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.113,ns_1@10.242.238.90:<0.22897.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:35.116,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 954 state to replica [ns_server:info,2014-08-19T16:50:35.117,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [952,954,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([954], []) [ns_server:debug,2014-08-19T16:50:35.117,ns_1@10.242.238.90:<0.23859.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [952,954,960,961,962,963,964,965,966,967,968, 969,970,971,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.3007>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[952,954,960,961,962,963,964,965,966,967,968,969,970,971,972, 973,974,975,976,977,978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:35.118,ns_1@10.242.238.90:<0.23859.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.23852.0> [ns_server:info,2014-08-19T16:50:35.118,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:35.129,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{952,1}, {954,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, 
{983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:35.130,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:35.130,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:35.130,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:35.130,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:35.130,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:35.130,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.130,ns_1@10.242.238.90:<0.23861.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.130,ns_1@10.242.238.90:<0.23861.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.131,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.131,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:35.131,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:35.131,ns_1@10.242.238.90:<0.23852.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:35.131,ns_1@10.242.238.90:<0.23859.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.23852.0> [ns_server:debug,2014-08-19T16:50:35.131,ns_1@10.242.238.90:<0.23859.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:35.131,ns_1@10.242.238.90:<0.23863.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:35.132,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.23852.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.23857.0>,<<"cut off">>,<<"cut off">>,[],199,false,false,0, {1408,452635,130327}, completed, {<0.23859.0>,#Ref<0.0.1.3020>}, <<"replication_ns_1@10.242.238.90">>,<0.23852.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:35.132,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.23859.0>,{#Ref<0.0.1.3009>,<0.23863.0>}} [error_logger:info,2014-08-19T16:50:35.132,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.23863.0>}, {name, {new_child_id, [952,954,960,961,962,963,964,965,966,967,968, 969,970,971,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [952,954,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:35.137,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.140,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3328 us [ns_server:debug,2014-08-19T16:50:35.140,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.141,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.141,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{954, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.144,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[952,954,960,961,962,963,964,965,966,967,968,969,970,971,972,973, 974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:35.144,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23864.0> [rebalance:debug,2014-08-19T16:50:35.162,ns_1@10.242.238.90:<0.22936.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
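Annotation: every completed filter change above ends with a supervisor PROGRESS REPORT from 'ns_vbm_new_sup-default' recording the replacement ebucketmigrator child (its pid, the new_child_id vbucket list, restart_type temporary, child_type worker). A sketch that summarizes all such child starts in a log; the scoping trick of slicing between consecutive report headers is an assumption of this note.

import re

HEADER = re.compile(
    r"PROGRESS REPORT=+\s*supervisor:\s*\{local,'?([^}']+)'?\}\s*"
    r"started:\s*\[\{pid,(<[\d.]+>)\}")
FIELD = re.compile(r"\{(restart_type|child_type),(\w+)\}")

def child_starts(log_text):
    # Yields (supervisor, child_pid, restart_type, child_type) per report.
    matches = list(HEADER.finditer(log_text))
    for i, m in enumerate(matches):
        end = matches[i + 1].start() if i + 1 < len(matches) else len(log_text)
        fields = dict(FIELD.findall(log_text[m.start():end]))
        yield (m.group(1), m.group(2),
               fields.get("restart_type"), fields.get("child_type"))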
[ns_server:debug,2014-08-19T16:50:35.162,ns_1@10.242.238.90:<0.22936.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.162,ns_1@10.242.238.90:<0.23865.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.163,ns_1@10.242.238.90:<0.23865.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.163,ns_1@10.242.238.90:<0.22936.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:35.167,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 953 state to replica [ns_server:info,2014-08-19T16:50:35.167,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [952,953,954,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([953], []) [ns_server:debug,2014-08-19T16:50:35.168,ns_1@10.242.238.90:<0.23866.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [952,953,954,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.3164>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[952,953,954,960,961,962,963,964,965,966,967,968,969,970,971, 972,973,974,975,976,977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:35.168,ns_1@10.242.238.90:<0.23866.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.23863.0> [ns_server:info,2014-08-19T16:50:35.169,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:50:35.179,ns_1@10.242.238.90:<0.22833.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.180,ns_1@10.242.238.90:<0.23868.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.180,ns_1@10.242.238.90:<0.22833.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.180,ns_1@10.242.238.90:<0.23868.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.180,ns_1@10.242.238.90:<0.22833.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:50:35.180,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{952,1}, {953,1}, {954,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:35.181,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:35.181,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:35.181,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:35.181,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:35.182,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:35.182,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.182,ns_1@10.242.238.90:<0.23869.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.182,ns_1@10.242.238.90:<0.23869.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.182,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.182,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:35.182,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:35.182,ns_1@10.242.238.90:<0.23863.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:35.182,ns_1@10.242.238.90:<0.23866.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.23863.0> [ns_server:debug,2014-08-19T16:50:35.183,ns_1@10.242.238.90:<0.23866.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:35.183,ns_1@10.242.238.90:<0.23871.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:35.183,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.23863.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.23864.0>,<<"cut off">>,<<"cut off">>,[],202,false,false,0, {1408,452635,181708}, completed, {<0.23866.0>,#Ref<0.0.1.3177>}, <<"replication_ns_1@10.242.238.90">>,<0.23863.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:35.183,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.23866.0>,{#Ref<0.0.1.3166>,<0.23871.0>}} [error_logger:info,2014-08-19T16:50:35.183,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.23871.0>}, {name, {new_child_id, [952,953,954,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [952,953,954,960,961,962,963,964,965,966, 967,968,969,970,971,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:35.188,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.191,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3616 us [ns_server:debug,2014-08-19T16:50:35.192,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.192,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.192,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{953, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.194,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[952,953,954,960,961,962,963,964,965,966,967,968,969,970,971,972, 973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988, 
989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:35.194,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23872.0> [ns_server:info,2014-08-19T16:50:35.197,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 956 state to replica [ns_server:info,2014-08-19T16:50:35.197,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [952,953,954,956,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974, 975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([956], []) [ns_server:debug,2014-08-19T16:50:35.198,ns_1@10.242.238.90:<0.23873.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [952,953,954,956,960,961,962,963,964,965,966, 967,968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.3313>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[952,953,954,956,960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:35.198,ns_1@10.242.238.90:<0.23873.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.23871.0> [ns_server:info,2014-08-19T16:50:35.198,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:35.209,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{952,1}, {953,1}, {954,1}, {956,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:35.210,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
[ns_server:info,2014-08-19T16:50:35.210,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:35.210,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:35.211,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:35.211,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:35.211,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.211,ns_1@10.242.238.90:<0.23875.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.211,ns_1@10.242.238.90:<0.23875.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.211,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.211,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:35.211,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:35.211,ns_1@10.242.238.90:<0.23871.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:35.211,ns_1@10.242.238.90:<0.23873.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.23871.0> [ns_server:debug,2014-08-19T16:50:35.212,ns_1@10.242.238.90:<0.23873.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:35.212,ns_1@10.242.238.90:<0.23877.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:35.212,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.23871.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.23872.0>,<<"cut off">>,<<"cut off">>,[],205,false,false,0, {1408,452635,210842}, completed, {<0.23873.0>,#Ref<0.0.1.3326>}, <<"replication_ns_1@10.242.238.90">>,<0.23871.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:35.212,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.23873.0>,{#Ref<0.0.1.3315>,<0.23877.0>}} [error_logger:info,2014-08-19T16:50:35.212,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.23877.0>}, {name, {new_child_id, [952,953,954,956,960,961,962,963,964,965,966, 967,968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [952,953,954,956,960,961,962,963,964,965, 966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:35.217,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.220,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2978 us [ns_server:debug,2014-08-19T16:50:35.220,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.221,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.221,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{956, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.223,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[952,953,954,956,960,961,962,963,964,965,966,967,968,969,970,971, 972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:35.223,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23879.0> [rebalance:debug,2014-08-19T16:50:35.343,ns_1@10.242.238.90:<0.22790.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
[ns_server:debug,2014-08-19T16:50:35.344,ns_1@10.242.238.90:<0.22790.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.344,ns_1@10.242.238.90:<0.23881.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.344,ns_1@10.242.238.90:<0.23881.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.344,ns_1@10.242.238.90:<0.22790.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:35.347,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 958 state to replica [ns_server:info,2014-08-19T16:50:35.347,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [952,953,954,956,958,960,961,962,963,964,965,966,967,968,969,970,971,972,973, 974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([958], []) [ns_server:debug,2014-08-19T16:50:35.349,ns_1@10.242.238.90:<0.23882.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [952,953,954,956,958,960,961,962,963,964,965, 966,967,968,969,970,971,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.3475>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[952,953,954,956,958,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:35.349,ns_1@10.242.238.90:<0.23882.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.23877.0> [ns_server:info,2014-08-19T16:50:35.349,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:50:35.357,ns_1@10.242.238.90:<0.22872.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.357,ns_1@10.242.238.90:<0.22872.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.357,ns_1@10.242.238.90:<0.23884.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.357,ns_1@10.242.238.90:<0.23884.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.358,ns_1@10.242.238.90:<0.22872.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:50:35.361,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{952,1}, {953,1}, {954,1}, {956,1}, {958,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:35.362,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:35.362,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:35.362,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:35.362,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:35.362,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:35.362,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.362,ns_1@10.242.238.90:<0.23885.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.362,ns_1@10.242.238.90:<0.23885.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.363,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.363,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:35.363,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:35.363,ns_1@10.242.238.90:<0.23877.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:35.363,ns_1@10.242.238.90:<0.23882.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.23877.0> [ns_server:debug,2014-08-19T16:50:35.363,ns_1@10.242.238.90:<0.23882.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:35.363,ns_1@10.242.238.90:<0.23887.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:35.363,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.23877.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.23879.0>,<<"cut off">>,<<"cut off">>,[],208,false,false,0, {1408,452635,362303}, completed, {<0.23882.0>,#Ref<0.0.1.3488>}, <<"replication_ns_1@10.242.238.90">>,<0.23877.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:35.364,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.23882.0>,{#Ref<0.0.1.3477>,<0.23887.0>}} [error_logger:info,2014-08-19T16:50:35.364,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.23887.0>}, {name, {new_child_id, [952,953,954,956,958,960,961,962,963,964,965, 966,967,968,969,970,971,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [952,953,954,956,958,960,961,962,963,964, 965,966,967,968,969,970,971,972,973,974, 975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:35.369,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.372,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.373,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3200 us [ns_server:debug,2014-08-19T16:50:35.373,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.374,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{958, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.375,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[952,953,954,956,958,960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986, 
987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:35.375,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23895.0> [ns_server:info,2014-08-19T16:50:35.377,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 955 state to replica [ns_server:info,2014-08-19T16:50:35.378,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [952,953,954,955,956,958,960,961,962,963,964,965,966,967,968,969,970,971,972, 973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([955], []) [ns_server:debug,2014-08-19T16:50:35.380,ns_1@10.242.238.90:<0.23896.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [952,953,954,955,956,958,960,961,962,963,964, 965,966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.3626>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[952,953,954,955,956,958,960,961,962,963,964,965,966,967,968, 969,970,971,972,973,974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:35.380,ns_1@10.242.238.90:<0.23896.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.23887.0> [ns_server:info,2014-08-19T16:50:35.380,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:35.391,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {958,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:35.392,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
[ns_server:info,2014-08-19T16:50:35.392,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:35.392,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:35.392,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:35.392,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:35.393,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.393,ns_1@10.242.238.90:<0.23898.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.393,ns_1@10.242.238.90:<0.23898.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.393,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.393,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:35.393,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:35.393,ns_1@10.242.238.90:<0.23887.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:35.393,ns_1@10.242.238.90:<0.23896.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.23887.0> [ns_server:debug,2014-08-19T16:50:35.394,ns_1@10.242.238.90:<0.23896.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:35.394,ns_1@10.242.238.90:<0.23900.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:35.394,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.23887.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.23895.0>,<<"cut off">>,<<"cut off">>,[],211,false,false,0, {1408,452635,392655}, completed, {<0.23896.0>,#Ref<0.0.1.3639>}, <<"replication_ns_1@10.242.238.90">>,<0.23887.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:35.394,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.23896.0>,{#Ref<0.0.1.3628>,<0.23900.0>}} [error_logger:info,2014-08-19T16:50:35.394,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.23900.0>}, {name, {new_child_id, [952,953,954,955,956,958,960,961,962,963,964, 965,966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [952,953,954,955,956,958,960,961,962,963, 964,965,966,967,968,969,970,971,972,973, 974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:35.399,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.405,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[952,953,954,955,956,958,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:35.405,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23901.0> [ns_server:debug,2014-08-19T16:50:35.406,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6092 us [ns_server:debug,2014-08-19T16:50:35.406,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.406,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.407,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{955, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:35.490,ns_1@10.242.238.90:<0.22815.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
[ns_server:debug,2014-08-19T16:50:35.491,ns_1@10.242.238.90:<0.22815.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.491,ns_1@10.242.238.90:<0.23903.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.491,ns_1@10.242.238.90:<0.23903.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.491,ns_1@10.242.238.90:<0.22815.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:35.494,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 957 state to replica [ns_server:info,2014-08-19T16:50:35.495,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [952,953,954,955,956,957,958,960,961,962,963,964,965,966,967,968,969,970,971, 972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023] ([957], []) [ns_server:debug,2014-08-19T16:50:35.496,ns_1@10.242.238.90:<0.23904.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [952,953,954,955,956,957,958,960,961,962,963, 964,965,966,967,968,969,970,971,972,973,974, 975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.3796>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[952,953,954,955,956,957,958,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:35.496,ns_1@10.242.238.90:<0.23904.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.23900.0> [ns_server:info,2014-08-19T16:50:35.496,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:35.507,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, 
{1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:35.508,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:35.509,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:35.509,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:35.509,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:35.509,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:35.509,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.509,ns_1@10.242.238.90:<0.23906.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.509,ns_1@10.242.238.90:<0.23906.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.509,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.509,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:35.509,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:35.510,ns_1@10.242.238.90:<0.23900.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:35.510,ns_1@10.242.238.90:<0.23904.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.23900.0> [ns_server:debug,2014-08-19T16:50:35.510,ns_1@10.242.238.90:<0.23904.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:35.510,ns_1@10.242.238.90:<0.23908.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:35.510,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.23900.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.23901.0>,<<"cut off">>,<<"cut off">>,[],214,false,false,0, {1408,452635,509012}, completed, {<0.23904.0>,#Ref<0.0.1.3809>}, <<"replication_ns_1@10.242.238.90">>,<0.23900.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:35.510,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.23904.0>,{#Ref<0.0.1.3798>,<0.23908.0>}} [error_logger:info,2014-08-19T16:50:35.510,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.23908.0>}, {name, {new_child_id, [952,953,954,955,956,957,958,960,961,962,963, 964,965,966,967,968,969,970,971,972,973,974, 975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [952,953,954,955,956,957,958,960,961,962, 963,964,965,966,967,968,969,970,971,972, 973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:35.516,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.519,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.519,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3525 us [ns_server:debug,2014-08-19T16:50:35.520,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.520,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{957, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.521,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[952,953,954,955,956,957,958,960,961,962,963,964,965,966,967,968, 969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:35.522,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23910.0> [rebalance:debug,2014-08-19T16:50:35.542,ns_1@10.242.238.90:<0.22771.0>:ebucketmigrator_srv:terminate:737]Dying with 
reason: shutdown [ns_server:debug,2014-08-19T16:50:35.543,ns_1@10.242.238.90:<0.22771.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.543,ns_1@10.242.238.90:<0.23911.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.543,ns_1@10.242.238.90:<0.23911.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.543,ns_1@10.242.238.90:<0.22771.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:35.546,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 959 state to replica [ns_server:info,2014-08-19T16:50:35.546,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023] ([959], []) [ns_server:debug,2014-08-19T16:50:35.547,ns_1@10.242.238.90:<0.23912.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [952,953,954,955,956,957,958,959,960,961,962, 963,964,965,966,967,968,969,970,971,972,973, 974,975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.3946>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[952,953,954,955,956,957,958,959,960,961,962,963,964,965,966, 967,968,969,970,971,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:35.548,ns_1@10.242.238.90:<0.23912.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.23908.0> [ns_server:info,2014-08-19T16:50:35.548,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:35.559,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, 
{1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:35.560,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:35.560,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:35.560,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:35.560,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:35.560,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:35.560,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.560,ns_1@10.242.238.90:<0.23914.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.560,ns_1@10.242.238.90:<0.23914.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.561,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.561,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:35.561,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:35.561,ns_1@10.242.238.90:<0.23908.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:35.561,ns_1@10.242.238.90:<0.23912.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.23908.0> [ns_server:debug,2014-08-19T16:50:35.561,ns_1@10.242.238.90:<0.23912.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:35.561,ns_1@10.242.238.90:<0.23916.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:35.561,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.23908.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.23910.0>,<<"cut off">>,<<"cut off">>,[],217,false,false,0, {1408,452635,560356}, completed, {<0.23912.0>,#Ref<0.0.1.3959>}, <<"replication_ns_1@10.242.238.90">>,<0.23908.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:35.562,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.23912.0>,{#Ref<0.0.1.3948>,<0.23916.0>}} [error_logger:info,2014-08-19T16:50:35.562,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.23916.0>}, {name, {new_child_id, [952,953,954,955,956,957,958,959,960,961,962, 963,964,965,966,967,968,969,970,971,972,973, 974,975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [952,953,954,955,956,957,958,959,960,961, 962,963,964,965,966,967,968,969,970,971, 972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:35.567,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.571,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.571,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3982 us [ns_server:debug,2014-08-19T16:50:35.572,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{959, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.572,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.576,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:35.577,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23918.0> 
[ns_server:debug,2014-08-19T16:50:35.616,ns_1@10.242.238.90:<0.23921.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 686) [ns_server:debug,2014-08-19T16:50:35.616,ns_1@10.242.238.90:<0.23921.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:35.616,ns_1@10.242.238.90:<0.23922.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 684) [ns_server:debug,2014-08-19T16:50:35.616,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23920.0> (ok) [ns_server:debug,2014-08-19T16:50:35.616,ns_1@10.242.238.90:<0.23922.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:35.616,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23919.0> (ok) [ns_server:debug,2014-08-19T16:50:35.617,ns_1@10.242.238.90:<0.23925.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 692) [ns_server:debug,2014-08-19T16:50:35.617,ns_1@10.242.238.90:<0.23925.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:35.617,ns_1@10.242.238.90:<0.23926.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 688) [ns_server:debug,2014-08-19T16:50:35.617,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23923.0> (ok) [ns_server:debug,2014-08-19T16:50:35.617,ns_1@10.242.238.90:<0.23926.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:50:35.617,ns_1@10.242.238.90:<0.23233.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:35.617,ns_1@10.242.238.90:<0.23284.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23931.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 694) [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23924.0> (ok) [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23931.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23284.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23935.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23233.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23938.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 690) [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23936.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23053.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message 
from subprocess: <0.23927.0> (ok) [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23935.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23938.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23936.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23940.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 683) [rebalance:info,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23284.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23928.0> (ok) [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23053.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23940.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23941.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23942.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 685) [rebalance:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23183.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23233.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.90:<0.23941.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23929.0> (ok) [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23942.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23943.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 689) [rebalance:info,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23053.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23930.0> (ok) [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23183.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23943.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23945.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 687) [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23944.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23016.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23945.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23932.0> (ok) [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23944.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23131.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23309.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23946.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 691) [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23933.0> (ok) [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23016.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23183.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23947.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23946.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23948.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 693) [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23131.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23309.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23950.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.619,ns_1@10.242.238.90:<0.23949.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23258.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23948.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23947.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23166.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23951.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 695) [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23950.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23949.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23934.0> (ok) [rebalance:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23208.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23951.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23258.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23937.0> (ok) [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23952.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23016.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23166.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23309.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23953.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23952.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.23939.0> (ok) [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23208.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23131.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23953.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23954.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23092.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23258.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23954.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.620,ns_1@10.242.238.90:<0.23166.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.23036.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.23208.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.23092.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.23955.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.23036.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.23956.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.23955.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.22991.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.23956.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.23092.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
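The repeated pattern in the entries above is the ebucketmigrator shutdown handshake: a migrator logs "Dying with reason: shutdown", a helper process pushes an opaque marker downstream ("Sending opaque message to confirm downstream reception"), and the terminating side waits for the acknowledgement before it finishes ("Got close ack!"). A minimal sketch of that wait-for-ack shape, assuming a downstream that simply echoes the marker back; the module, function, and message names here are illustrative and not the actual ebucketmigrator_srv protocol:

    %% Illustrative sketch of the shutdown ack handshake seen in this log.
    %% Not the real ebucketmigrator_srv code; message shapes are assumed.
    -module(ack_handshake_sketch).
    -export([demo/0]).

    demo() ->
        Downstream = spawn(fun downstream/0),
        confirm_sent_messages(Downstream).

    %% Stand-in for the downstream connection: echoes the opaque marker back.
    downstream() ->
        receive
            {opaque, From, Ref} -> From ! {close_ack, Ref}
        end.

    confirm_sent_messages(Downstream) ->
        Ref = make_ref(),
        %% "Sending opaque message to confirm downstream reception"
        Downstream ! {opaque, self(), Ref},
        %% "Going to wait for reception of opaque message ack"
        receive
            {close_ack, Ref} ->
                %% "Got close ack!"
                ok
        after 5000 ->
            {error, timeout}
        end.

The make_ref() marker plays the role of the opaque message: the ack is only accepted if it carries the same reference, so an acknowledgement from an earlier exchange cannot satisfy the wait.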
[rebalance:info,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.23036.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.22991.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.23957.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.23957.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.621,ns_1@10.242.238.90:<0.22991.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:35.745,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 684 state to active [ns_server:info,2014-08-19T16:50:35.747,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 686 state to active [ns_server:debug,2014-08-19T16:50:35.774,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.778,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.779,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4369 us [ns_server:debug,2014-08-19T16:50:35.779,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.780,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{428, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.802,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.806,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2940 us [ns_server:debug,2014-08-19T16:50:35.806,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.806,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.807,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{684, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, 
{servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:35.820,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/684. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:35.820,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",684,active,1} [rebalance:debug,2014-08-19T16:50:35.827,ns_1@10.242.238.90:<0.22997.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.827,ns_1@10.242.238.90:<0.22997.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.827,ns_1@10.242.238.90:<0.23960.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.827,ns_1@10.242.238.90:<0.23960.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.827,ns_1@10.242.238.90:<0.22997.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.832,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.835,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.835,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2720 us [ns_server:debug,2014-08-19T16:50:35.836,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.836,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{686, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:35.836,ns_1@10.242.238.90:<0.23023.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.837,ns_1@10.242.238.90:<0.23023.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.837,ns_1@10.242.238.90:<0.23961.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.837,ns_1@10.242.238.90:<0.23961.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.837,ns_1@10.242.238.90:<0.23023.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
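Each "config change: buckets" entry prints the per-bucket settings as a nested Erlang proplist of the shape {configs, [{BucketName, Props}]}. A minimal sketch of pulling individual fields out of that shape with the standard proplists module; the literal values are copied from the entries above, while the module name and the subset of keys shown are illustrative:

    %% Illustrative sketch: reading fields from the bucket config proplist
    %% printed in the "config change: buckets" entries. Values copied from
    %% the log; this is not ns_server's own accessor API.
    -module(bucket_config_sketch).
    -export([demo/0]).

    demo() ->
        Buckets = [{configs,
                    [{"default",
                      [{num_replicas, 1},
                       {ram_quota, 13369344000},
                       {num_vbuckets, 1024},
                       {servers, ['ns_1@10.242.238.88', 'ns_1@10.242.238.89',
                                  'ns_1@10.242.238.90', 'ns_1@10.242.238.91']}]}]}],
        Configs = proplists:get_value(configs, Buckets),
        Default = proplists:get_value("default", Configs),
        {proplists:get_value(num_vbuckets, Default),
         proplists:get_value(servers, Default)}.
        %% -> {1024, ['ns_1@10.242.238.88', ...]}

Note that the config logger masks sasl_password as "*****", and each of these entries carries a single vbucket chain in its map list (for example vbucket 684 or 686 above) rather than the full 1024-entry map.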
[rebalance:debug,2014-08-19T16:50:35.853,ns_1@10.242.238.90:<0.23048.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.853,ns_1@10.242.238.90:<0.23048.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.853,ns_1@10.242.238.90:<0.23963.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.853,ns_1@10.242.238.90:<0.23963.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.853,ns_1@10.242.238.90:<0.23048.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.857,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.865,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.865,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7684 us [ns_server:debug,2014-08-19T16:50:35.866,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.867,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{441, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:35.881,ns_1@10.242.238.90:<0.23153.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.881,ns_1@10.242.238.90:<0.23153.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.881,ns_1@10.242.238.90:<0.23964.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.881,ns_1@10.242.238.90:<0.23964.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.881,ns_1@10.242.238.90:<0.23153.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.884,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.887,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.887,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3273 us [views:debug,2014-08-19T16:50:35.888,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/686. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:35.888,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",686,active,1} [ns_server:debug,2014-08-19T16:50:35.888,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{438, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.889,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:35.906,ns_1@10.242.238.90:<0.23228.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.906,ns_1@10.242.238.90:<0.23228.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.907,ns_1@10.242.238.90:<0.23966.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.907,ns_1@10.242.238.90:<0.23966.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.907,ns_1@10.242.238.90:<0.23228.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:35.909,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 691 state to active [ns_server:debug,2014-08-19T16:50:35.909,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:35.913,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3269 us [ns_server:debug,2014-08-19T16:50:35.913,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.913,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.914,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{432, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:35.925,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 685 state to active [ns_server:debug,2014-08-19T16:50:35.932,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:50:35.933,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1428 us [ns_server:debug,2014-08-19T16:50:35.934,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.935,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{434, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.935,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:35.938,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 950 state to replica [ns_server:info,2014-08-19T16:50:35.938,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [950,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023] ([950], []) [ns_server:debug,2014-08-19T16:50:35.940,ns_1@10.242.238.90:<0.23968.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [950,952,953,954,955,956,957,958,959,960,961, 962,963,964,965,966,967,968,969,970,971,972, 973,974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.4765>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[950,952,953,954,955,956,957,958,959,960,961,962,963,964,965, 966,967,968,969,970,971,972,973,974,975,976,977,978,979,980, 981,982,983,984,985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:35.940,ns_1@10.242.238.90:<0.23968.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.23916.0> [ns_server:info,2014-08-19T16:50:35.941,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:50:35.957,ns_1@10.242.238.90:<0.23329.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
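The change_vbucket_filter entry above ends with "([950], [])", which reads as the vbuckets being added to and removed from the existing replication filter for 'ns_1@10.242.238.91' (an interpretation; the tap_replication_manager code itself is not part of this log). A minimal sketch of computing such a diff from the old and new vbucket lists:

    %% Illustrative sketch of the added/removed diff implied by the trailing
    %% "([950], [])" in the change_vbucket_filter entries. Assumes sorted
    %% vbucket id lists; not the real tap_replication_manager logic.
    -module(vbucket_diff_sketch).
    -export([diff/2, demo/0]).

    diff(Old, New) ->
        Added   = ordsets:subtract(ordsets:from_list(New), ordsets:from_list(Old)),
        Removed = ordsets:subtract(ordsets:from_list(Old), ordsets:from_list(New)),
        {Added, Removed}.

    demo() ->
        Old = lists:seq(952, 1023),
        New = [950 | Old],
        diff(Old, New).   %% -> {[950], []}

demo/0 reproduces the case logged here: the new filter is the old one plus vbucket 950, so the result is {[950], []}.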
[ns_server:debug,2014-08-19T16:50:35.957,ns_1@10.242.238.90:<0.23329.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.958,ns_1@10.242.238.90:<0.23971.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.958,ns_1@10.242.238.90:<0.23971.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.958,ns_1@10.242.238.90:<0.23329.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:35.959,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{950,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:35.960,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:35.960,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:35.960,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:35.961,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:35.961,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:35.961,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.961,ns_1@10.242.238.90:<0.23972.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.961,ns_1@10.242.238.90:<0.23972.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.961,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.961,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:35.961,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:35.961,ns_1@10.242.238.90:<0.23916.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:35.961,ns_1@10.242.238.90:<0.23968.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.23916.0> [ns_server:debug,2014-08-19T16:50:35.962,ns_1@10.242.238.90:<0.23968.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:35.962,ns_1@10.242.238.90:<0.23974.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:35.962,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.23916.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.23918.0>,<<"cut off">>,<<"cut off">>,[],220,false,false,0, {1408,452635,960790}, completed, {<0.23968.0>,#Ref<0.0.1.4780>}, <<"replication_ns_1@10.242.238.90">>,<0.23916.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:35.962,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.23968.0>,{#Ref<0.0.1.4767>,<0.23974.0>}} [error_logger:info,2014-08-19T16:50:35.962,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.23974.0>}, {name, {new_child_id, [950,952,953,954,955,956,957,958,959,960,961, 962,963,964,965,966,967,968,969,970,971,972, 973,974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [950,952,953,954,955,956,957,958,959,960, 961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980, 981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:50:35.963,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 692 state to active [views:debug,2014-08-19T16:50:35.971,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/691. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:35.971,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",691,active,1} [ns_server:debug,2014-08-19T16:50:35.972,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:35.975,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 688 state to active [ns_server:debug,2014-08-19T16:50:35.975,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[950,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966, 967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:35.975,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23975.0> [ns_server:debug,2014-08-19T16:50:35.975,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2895 us [ns_server:debug,2014-08-19T16:50:35.976,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.976,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{950, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.976,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:35.982,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 949 state to replica [ns_server:info,2014-08-19T16:50:35.983,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [949,950,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968, 969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023] ([949], []) [ns_server:debug,2014-08-19T16:50:35.983,ns_1@10.242.238.90:<0.23976.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [949,950,952,953,954,955,956,957,958,959,960, 961,962,963,964,965,966,967,968,969,970,971, 972,973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.4972>} 
Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[949,950,952,953,954,955,956,957,958,959,960,961,962,963,964, 965,966,967,968,969,970,971,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:35.984,ns_1@10.242.238.90:<0.23976.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.23974.0> [ns_server:info,2014-08-19T16:50:35.984,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:50:35.992,ns_1@10.242.238.90:<0.22986.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.992,ns_1@10.242.238.90:<0.22986.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.992,ns_1@10.242.238.90:<0.23978.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.992,ns_1@10.242.238.90:<0.23978.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.993,ns_1@10.242.238.90:<0.22986.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:35.995,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{949,1}, {950,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:35.996,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:35.997,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:35.997,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
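The "Changing vbucket filter on tap stream" entries list one {VBucket, N} pair per vbucket; in this log N is 1 for every vbucket, presumably a per-vbucket checkpoint id (an assumption, it is not spelled out in these entries). A minimal sketch of assembling that filter from the vbucket list carried in the new_child_id:

    %% Illustrative sketch: pairing each vbucket id with a checkpoint-like
    %% integer, matching the [{949,1},{950,1},...] filter printed above.
    -module(vbucket_filter_sketch).
    -export([build_filter/2, demo/0]).

    build_filter(VBuckets, CheckpointId) ->
        [{V, CheckpointId} || V <- VBuckets].

    demo() ->
        VBuckets = [949, 950 | lists:seq(952, 1023)],
        build_filter(VBuckets, 1).
        %% -> [{949,1},{950,1},{952,1},...,{1023,1}]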
[ns_server:debug,2014-08-19T16:50:35.997,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:35.997,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:35.997,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:35.997,ns_1@10.242.238.90:<0.23980.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:35.997,ns_1@10.242.238.90:<0.23980.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:35.997,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:35.997,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:35.998,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:35.998,ns_1@10.242.238.90:<0.23974.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:35.998,ns_1@10.242.238.90:<0.23976.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.23974.0> [ns_server:debug,2014-08-19T16:50:35.998,ns_1@10.242.238.90:<0.23976.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:35.998,ns_1@10.242.238.90:<0.23982.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:35.998,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.23974.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.23975.0>,<<"cut off">>,<<"cut off">>,[],223,false,false,0, {1408,452635,996976}, completed, {<0.23976.0>,#Ref<0.0.1.4985>}, <<"replication_ns_1@10.242.238.90">>,<0.23974.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:35.999,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.23976.0>,{#Ref<0.0.1.4974>,<0.23982.0>}} [error_logger:info,2014-08-19T16:50:35.999,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.23982.0>}, {name, {new_child_id, [949,950,952,953,954,955,956,957,958,959,960, 961,962,963,964,965,966,967,968,969,970,971, 972,973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [949,950,952,953,954,955,956,957,958,959, 960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:36.003,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:36.003,ns_1@10.242.238.90:<0.23279.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.003,ns_1@10.242.238.90:<0.23279.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.003,ns_1@10.242.238.90:<0.23983.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.003,ns_1@10.242.238.90:<0.23983.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.003,ns_1@10.242.238.90:<0.23279.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:36.005,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.006,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2733 us [ns_server:debug,2014-08-19T16:50:36.006,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.007,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{949, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:36.008,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 948 state to replica [ns_server:info,2014-08-19T16:50:36.008,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [948,949,950,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023] ([948], []) [ns_server:debug,2014-08-19T16:50:36.010,ns_1@10.242.238.90:<0.23984.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [948,949,950,952,953,954,955,956,957,958,959, 960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.5127>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[948,949,950,952,953,954,955,956,957,958,959,960,961,962,963, 964,965,966,967,968,969,970,971,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:36.010,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[949,950,952,953,954,955,956,957,958,959,960,961,962,963,964,965, 966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] 
[ns_server:debug,2014-08-19T16:50:36.010,ns_1@10.242.238.90:<0.23984.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.23982.0> [rebalance:debug,2014-08-19T16:50:36.010,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23986.0> [ns_server:info,2014-08-19T16:50:36.010,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:50:36.018,ns_1@10.242.238.90:<0.23098.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.018,ns_1@10.242.238.90:<0.23098.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.018,ns_1@10.242.238.90:<0.23987.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.018,ns_1@10.242.238.90:<0.23987.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.018,ns_1@10.242.238.90:<0.23098.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:36.021,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{948,1}, {949,1}, {950,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:36.022,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:36.022,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:36.023,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:50:36.023,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:36.023,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:36.023,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.023,ns_1@10.242.238.90:<0.23988.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.023,ns_1@10.242.238.90:<0.23988.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.023,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:36.023,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:36.023,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:36.023,ns_1@10.242.238.90:<0.23982.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:36.023,ns_1@10.242.238.90:<0.23984.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.23982.0> [ns_server:debug,2014-08-19T16:50:36.024,ns_1@10.242.238.90:<0.23984.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:36.024,ns_1@10.242.238.90:<0.23990.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:36.024,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.23982.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.23986.0>,<<"cut off">>,<<"cut off">>,[],226,false,false,0, {1408,452636,22853}, completed, {<0.23984.0>,#Ref<0.0.1.5144>}, <<"replication_ns_1@10.242.238.90">>,<0.23982.0>, {had_backfill,false,undefined,[]}, completed,false}. 
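The sequence above is the state handover that makes the filter change cheap: the old migrator silences its upstream sender, confirms the downstream, then reports "Passed old state to caller"; the filter-change process logs "Sent old state to new instance"; and the replacement logs "Got old ebucketmigrator state from ..." and "Reusing old upstream", so the replication connection is reused rather than reopened. A minimal sketch of that hand-off between an old and a new process, with illustrative message shapes and state fields (not the real ns_vbm_new_sup / ebucketmigrator_srv code):

    %% Illustrative sketch of passing state from a dying process to its
    %% replacement so an existing connection can be reused. Message and
    %% state shapes are assumed for the example.
    -module(state_handover_sketch).
    -export([demo/0]).

    demo() ->
        Old = spawn(fun old_migrator/0),
        Old ! {get_old_state, self()},
        receive
            {old_state, Old, State} ->
                %% "Sent old state to new instance"
                New = spawn(fun() -> new_migrator(State) end),
                {handed_over_to, New}
        after 5000 ->
            {error, timeout}
        end.

    old_migrator() ->
        State = #{upstream => reused_socket, vbuckets => lists:seq(948, 1023)},
        receive
            {get_old_state, From} ->
                %% "Passed old state to caller" ... "Sent out state. Preparing to die"
                From ! {old_state, self(), State}
        end.

    new_migrator(State) ->
        %% "Got old ebucketmigrator state from ..." / "Reusing old upstream"
        io:format("reusing upstream from old state: ~p~n", [maps:get(upstream, State)]),
        ok.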
[ns_server:debug,2014-08-19T16:50:36.024,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.23984.0>,{#Ref<0.0.1.5129>,<0.23990.0>}} [error_logger:info,2014-08-19T16:50:36.024,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.23990.0>}, {name, {new_child_id, [948,949,950,952,953,954,955,956,957,958,959, 960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [948,949,950,952,953,954,955,956,957,958, 959,960,961,962,963,964,965,966,967,968, 969,970,971,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:36.030,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.037,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7295 us [ns_server:debug,2014-08-19T16:50:36.037,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.037,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[948,949,950,952,953,954,955,956,957,958,959,960,961,962,963,964, 965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980, 981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:36.038,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.23992.0> [ns_server:debug,2014-08-19T16:50:36.038,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:36.038,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/685. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:36.039,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",685,active,1} [ns_server:debug,2014-08-19T16:50:36.039,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{948, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:36.048,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 693 state to active [rebalance:debug,2014-08-19T16:50:36.062,ns_1@10.242.238.90:<0.23253.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.062,ns_1@10.242.238.90:<0.23253.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.062,ns_1@10.242.238.90:<0.23993.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.062,ns_1@10.242.238.90:<0.23993.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.063,ns_1@10.242.238.90:<0.23253.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:36.064,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.067,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.067,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3069 us [ns_server:debug,2014-08-19T16:50:36.068,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.069,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{439, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:36.071,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 945 state to replica [ns_server:info,2014-08-19T16:50:36.071,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [945,948,949,950,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966, 
967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023] ([945], []) [ns_server:debug,2014-08-19T16:50:36.072,ns_1@10.242.238.90:<0.23995.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [945,948,949,950,952,953,954,955,956,957,958, 959,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980, 981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.5367>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[945,948,949,950,952,953,954,955,956,957,958,959,960,961,962, 963,964,965,966,967,968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:36.072,ns_1@10.242.238.90:<0.23995.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.23990.0> [ns_server:info,2014-08-19T16:50:36.073,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:36.084,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{945,1}, {948,1}, {949,1}, {950,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:36.085,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:36.085,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:36.085,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
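The "Going to change replication ... ([945], [])" entry above reads as the target vbucket list followed by an (added, removed) delta; under that reading the new filter is just the old list merged with the additions and stripped of the removals. A one-function sketch:

    %% Sketch: compute the target vbucket list from the previous one plus
    %% the (Added, Removed) delta shown in parentheses in the log entry.
    new_vbucket_filter(Old, Added, Removed) ->
        lists:usort((Old ++ Added) -- Removed).

    %% e.g. new_vbucket_filter([948,...,1023], [945], []) -> [945,948,...,1023],
    %% matching the list passed to the new ebucketmigrator child above.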
[ns_server:debug,2014-08-19T16:50:36.086,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:36.086,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:36.086,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.086,ns_1@10.242.238.90:<0.23997.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.086,ns_1@10.242.238.90:<0.23997.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.086,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:36.086,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:36.086,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:36.086,ns_1@10.242.238.90:<0.23990.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:36.087,ns_1@10.242.238.90:<0.23995.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.23990.0> [ns_server:debug,2014-08-19T16:50:36.087,ns_1@10.242.238.90:<0.23995.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:36.087,ns_1@10.242.238.90:<0.23999.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:36.087,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.23990.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.23992.0>,<<"cut off">>,<<"cut off">>,[],229,false,false,0, {1408,452636,85781}, completed, {<0.23995.0>,#Ref<0.0.1.5380>}, <<"replication_ns_1@10.242.238.90">>,<0.23990.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:36.087,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.23995.0>,{#Ref<0.0.1.5369>,<0.23999.0>}} [ns_server:info,2014-08-19T16:50:36.088,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 683 state to active [error_logger:info,2014-08-19T16:50:36.087,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.23999.0>}, {name, {new_child_id, [945,948,949,950,952,953,954,955,956,957,958, 959,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980, 981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [945,948,949,950,952,953,954,955,956,957, 958,959,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:50:36.089,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 695 state to active [ns_server:debug,2014-08-19T16:50:36.093,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.096,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.096,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2764 us [ns_server:debug,2014-08-19T16:50:36.096,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.097,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{945, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.100,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[945,948,949,950,952,953,954,955,956,957,958,959,960,961,962,963, 964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 
1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:36.100,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24000.0> [rebalance:debug,2014-08-19T16:50:36.102,ns_1@10.242.238.90:<0.23178.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.103,ns_1@10.242.238.90:<0.23178.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.103,ns_1@10.242.238.90:<0.24001.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.103,ns_1@10.242.238.90:<0.24001.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.103,ns_1@10.242.238.90:<0.23178.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:50:36.108,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/692. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:36.108,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",692,active,1} [ns_server:info,2014-08-19T16:50:36.113,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 942 state to replica [ns_server:info,2014-08-19T16:50:36.113,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [942,945,948,949,950,952,953,954,955,956,957,958,959,960,961,962,963,964,965, 966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023] ([942], []) [ns_server:debug,2014-08-19T16:50:36.114,ns_1@10.242.238.90:<0.24003.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [942,945,948,949,950,952,953,954,955,956,957, 958,959,960,961,962,963,964,965,966,967,968, 969,970,971,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.5589>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[942,945,948,949,950,952,953,954,955,956,957,958,959,960,961, 962,963,964,965,966,967,968,969,970,971,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:36.115,ns_1@10.242.238.90:<0.24003.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.23999.0> 
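Each "Reusing old upstream" entry shows the new instance keeping the existing TAP connection rather than reconnecting. A hedged sketch of that decision; the actual criterion in ebucketmigrator_srv may differ, and comparing only the stream name and takeover flag is an assumption:

    %% Sketch: keep the old upstream only when the stream identity is
    %% unchanged; otherwise tear it down and open a fresh connection.
    maybe_reuse_upstream(OldOpts, NewOpts) ->
        SameName = proplists:get_value(name, OldOpts) =:=
                       proplists:get_value(name, NewOpts),
        SameTakeover = proplists:get_value(takeover, OldOpts) =:=
                           proplists:get_value(takeover, NewOpts),
        case SameName andalso SameTakeover of
            true  -> {reuse, OldOpts};
            false -> {reconnect, NewOpts}
        end.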
[ns_server:info,2014-08-19T16:50:36.115,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:50:36.118,ns_1@10.242.238.90:<0.23304.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.118,ns_1@10.242.238.90:<0.23304.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.118,ns_1@10.242.238.90:<0.24005.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.118,ns_1@10.242.238.90:<0.24005.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.118,ns_1@10.242.238.90:<0.23304.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:36.126,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{942,1}, {945,1}, {948,1}, {949,1}, {950,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:36.127,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:36.127,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:36.127,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:36.127,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:36.127,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:36.127,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.127,ns_1@10.242.238.90:<0.24006.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.128,ns_1@10.242.238.90:<0.24006.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.128,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
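The repeated confirm_sent_messages / "Got close ack!" exchanges above act as a barrier: an opaque marker is sent after the last real message, and receiving its ack proves the downstream consumed everything queued before it. A minimal sketch of that idea; the message shapes are assumptions:

    %% Sketch: flush-and-confirm barrier over the downstream channel.
    confirm_downstream(DownstreamPid) ->
        Opaque = make_ref(),
        DownstreamPid ! {opaque, Opaque, self()},
        receive
            {opaque_ack, Opaque} -> ok          %% the "Got close ack!" case
        after 30000 ->
            {error, confirm_timeout}
        end.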
[ns_server:debug,2014-08-19T16:50:36.128,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:36.128,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:36.128,ns_1@10.242.238.90:<0.23999.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:36.128,ns_1@10.242.238.90:<0.24003.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.23999.0> [ns_server:debug,2014-08-19T16:50:36.128,ns_1@10.242.238.90:<0.24003.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:36.129,ns_1@10.242.238.90:<0.24008.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:36.129,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.23999.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.24000.0>,<<"cut off">>,<<"cut off">>,[],232,false,false,0, {1408,452636,127271}, completed, {<0.24003.0>,#Ref<0.0.1.5602>}, <<"replication_ns_1@10.242.238.90">>,<0.23999.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:36.129,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.24003.0>,{#Ref<0.0.1.5591>,<0.24008.0>}} [error_logger:info,2014-08-19T16:50:36.129,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.24008.0>}, {name, {new_child_id, [942,945,948,949,950,952,953,954,955,956,957, 958,959,960,961,962,963,964,965,966,967,968, 969,970,971,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [942,945,948,949,950,952,953,954,955,956, 957,958,959,960,961,962,963,964,965,966, 967,968,969,970,971,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:36.137,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.140,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[942,945,948,949,950,952,953,954,955,956,957,958,959,960,961,962, 963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008, 
1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:36.140,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24009.0> [ns_server:debug,2014-08-19T16:50:36.141,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.141,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3409 us [ns_server:debug,2014-08-19T16:50:36.141,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.142,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{942, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:36.158,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/688. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:36.158,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",688,active,1} [ns_server:info,2014-08-19T16:50:36.158,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 689 state to active [ns_server:debug,2014-08-19T16:50:36.170,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:36.173,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 694 state to active [ns_server:debug,2014-08-19T16:50:36.173,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3296 us [ns_server:debug,2014-08-19T16:50:36.174,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.174,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.174,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{691, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.199,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
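The mc_couch_events entries carry tuples of the shape seen above, e.g. {set_vbucket,"default",688,active,1}; handling them is essentially a pattern match. A minimal sketch, where the trailing integer is presumed to be a checkpoint id:

    %% Sketch: react to a vbucket state-change notification from memcached.
    handle_mc_couch_event({set_vbucket, Bucket, VBucket, State, Chkpt}) ->
        io:format("Got set_vbucket event for ~s/~p. Updated state: ~p (~p)~n",
                  [Bucket, VBucket, State, Chkpt]);
    handle_mc_couch_event(_Other) ->
        ok.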
[views:debug,2014-08-19T16:50:36.200,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/695. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:36.200,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",695,active,1} [ns_server:debug,2014-08-19T16:50:36.202,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.202,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2777 us [ns_server:debug,2014-08-19T16:50:36.203,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.203,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{685, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:36.205,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 938 state to replica [ns_server:info,2014-08-19T16:50:36.205,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [938,942,945,948,949,950,952,953,954,955,956,957,958,959,960,961,962,963,964, 965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023] ([938], []) [ns_server:debug,2014-08-19T16:50:36.206,ns_1@10.242.238.90:<0.24012.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [938,942,945,948,949,950,952,953,954,955,956, 957,958,959,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.5859>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[938,942,945,948,949,950,952,953,954,955,956,957,958,959,960, 961,962,963,964,965,966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:36.206,ns_1@10.242.238.90:<0.24012.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.24008.0> 
[ns_server:info,2014-08-19T16:50:36.207,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:36.218,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{938,1}, {942,1}, {945,1}, {948,1}, {949,1}, {950,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:36.219,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:36.219,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:36.219,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:36.219,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:36.220,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:36.220,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.220,ns_1@10.242.238.90:<0.24015.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.220,ns_1@10.242.238.90:<0.24015.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.220,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:36.220,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:36.220,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:36.220,ns_1@10.242.238.90:<0.24008.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
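The filter pushed on the tap stream is a list of {VBucket, N} pairs; in every entry above N is 1 (presumably a checkpoint the stream restarts from — an assumption). Building that list from the plain vbucket list is a one-liner:

    %% Sketch: pair each vbucket with the per-vbucket value sent in the
    %% filter-change request (always 1 in the log above).
    to_tap_filter(VBuckets, N) ->
        [{VB, N} || VB <- VBuckets].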
Preparing to die [ns_server:debug,2014-08-19T16:50:36.220,ns_1@10.242.238.90:<0.24012.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.24008.0> [ns_server:debug,2014-08-19T16:50:36.221,ns_1@10.242.238.90:<0.24012.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:36.221,ns_1@10.242.238.90:<0.24017.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:36.221,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.24008.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.24009.0>,<<"cut off">>,<<"cut off">>,[],235,false,false,0, {1408,452636,219716}, completed, {<0.24012.0>,#Ref<0.0.1.5872>}, <<"replication_ns_1@10.242.238.90">>,<0.24008.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:36.221,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.24012.0>,{#Ref<0.0.1.5861>,<0.24017.0>}} [error_logger:info,2014-08-19T16:50:36.221,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.24017.0>}, {name, {new_child_id, [938,942,945,948,949,950,952,953,954,955,956, 957,958,959,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [938,942,945,948,949,950,952,953,954,955, 956,957,958,959,960,961,962,963,964,965, 966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:36.228,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:36.229,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 690 state to active [ns_server:debug,2014-08-19T16:50:36.235,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[938,942,945,948,949,950,952,953,954,955,956,957,958,959,960,961, 962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:36.236,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24018.0> 
[ns_server:debug,2014-08-19T16:50:36.238,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 9814 us [ns_server:debug,2014-08-19T16:50:36.238,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.239,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.239,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{938, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:36.241,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/693. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:36.242,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",693,active,1} [rebalance:debug,2014-08-19T16:50:36.262,ns_1@10.242.238.90:<0.23203.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.262,ns_1@10.242.238.90:<0.23203.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.262,ns_1@10.242.238.90:<0.24020.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.262,ns_1@10.242.238.90:<0.24020.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.262,ns_1@10.242.238.90:<0.23203.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:50:36.262,ns_1@10.242.238.90:<0.23073.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.263,ns_1@10.242.238.90:<0.23073.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.263,ns_1@10.242.238.90:<0.24021.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.263,ns_1@10.242.238.90:<0.24021.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.263,ns_1@10.242.238.90:<0.23073.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
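The "Fully synchronized config in N us" figures are microsecond timings of the synchronization call; timer:tc/1 yields exactly that kind of number. A sketch — the fun passed in is a stand-in, not the real ns_config_rep internals:

    %% Sketch: time a synchronization fun and report it the way the log does.
    report_sync_time(SyncFun) ->
        {Micros, _Result} = timer:tc(SyncFun),
        error_logger:info_msg("Fully synchronized config in ~p us~n", [Micros]).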
[ns_server:debug,2014-08-19T16:50:36.265,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.268,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2759 us [ns_server:debug,2014-08-19T16:50:36.268,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.268,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.269,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{692, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:36.284,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/689. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:36.284,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",689,active,1} [ns_server:debug,2014-08-19T16:50:36.290,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.293,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.293,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3302 us [ns_server:debug,2014-08-19T16:50:36.294,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.294,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{688, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:36.297,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 687 state to active [ns_server:info,2014-08-19T16:50:36.298,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 951 state to replica [ns_server:info,2014-08-19T16:50:36.299,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [938,942,945,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963, 
964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023] ([951], []) [ns_server:debug,2014-08-19T16:50:36.301,ns_1@10.242.238.90:<0.24023.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [938,942,945,948,949,950,951,952,953,954,955, 956,957,958,959,960,961,962,963,964,965,966, 967,968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.6160>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[938,942,945,948,949,950,951,952,953,954,955,956,957,958,959, 960,961,962,963,964,965,966,967,968,969,970,971,972,973,974, 975,976,977,978,979,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:36.302,ns_1@10.242.238.90:<0.24023.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.24017.0> [ns_server:info,2014-08-19T16:50:36.302,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:36.314,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{938,1}, {942,1}, {945,1}, {948,1}, {949,1}, {950,1}, {951,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:36.314,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:36.315,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:36.315,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:50:36.315,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:36.315,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:36.315,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.315,ns_1@10.242.238.90:<0.24026.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.315,ns_1@10.242.238.90:<0.24026.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.315,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:36.315,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:36.316,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:36.316,ns_1@10.242.238.90:<0.24017.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:36.316,ns_1@10.242.238.90:<0.24023.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.24017.0> [ns_server:debug,2014-08-19T16:50:36.316,ns_1@10.242.238.90:<0.24023.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:36.316,ns_1@10.242.238.90:<0.24028.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:36.316,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.24017.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.24018.0>,<<"cut off">>,<<"cut off">>,[],238,false,false,0, {1408,452636,315139}, completed, {<0.24023.0>,#Ref<0.0.1.6173>}, <<"replication_ns_1@10.242.238.90">>,<0.24017.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:36.317,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.24023.0>,{#Ref<0.0.1.6162>,<0.24028.0>}} [error_logger:info,2014-08-19T16:50:36.317,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.24028.0>}, {name, {new_child_id, [938,942,945,948,949,950,951,952,953,954,955, 956,957,958,959,960,961,962,963,964,965,966, 967,968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [938,942,945,948,949,950,951,952,953,954, 955,956,957,958,959,960,961,962,963,964, 965,966,967,968,969,970,971,972,973,974, 975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:36.322,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.326,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4116 us [ns_server:debug,2014-08-19T16:50:36.326,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.326,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:36.326,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/683. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:36.327,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",683,active,1} [ns_server:debug,2014-08-19T16:50:36.327,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{951, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:36.328,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 940 state to replica [ns_server:debug,2014-08-19T16:50:36.329,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[938,942,945,948,949,950,951,952,953,954,955,956,957,958,959,960, 961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019, 1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:36.329,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24029.0> [ns_server:info,2014-08-19T16:50:36.329,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [938,940,942,945,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962, 963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023] ([940], []) [ns_server:debug,2014-08-19T16:50:36.330,ns_1@10.242.238.90:<0.24030.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [938,940,942,945,948,949,950,951,952,953,954, 955,956,957,958,959,960,961,962,963,964,965, 966,967,968,969,970,971,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.6317>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[938,940,942,945,948,949,950,951,952,953,954,955,956,957,958, 959,960,961,962,963,964,965,966,967,968,969,970,971,972,973, 974,975,976,977,978,979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:36.330,ns_1@10.242.238.90:<0.24030.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked 
myself to old ebucketmigrator <0.24028.0> [ns_server:info,2014-08-19T16:50:36.330,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:36.342,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{938,1}, {940,1}, {942,1}, {945,1}, {948,1}, {949,1}, {950,1}, {951,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:36.343,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:36.343,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:36.343,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:36.343,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:36.343,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:36.343,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.343,ns_1@10.242.238.90:<0.24032.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.344,ns_1@10.242.238.90:<0.24032.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.344,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:36.344,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:36.344,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:36.344,ns_1@10.242.238.90:<0.24028.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:36.344,ns_1@10.242.238.90:<0.24030.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.24028.0> [ns_server:debug,2014-08-19T16:50:36.344,ns_1@10.242.238.90:<0.24030.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:36.345,ns_1@10.242.238.90:<0.24034.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:36.345,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.24028.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.24029.0>,<<"cut off">>,<<"cut off">>,[],241,false,false,0, {1408,452636,343446}, completed, {<0.24030.0>,#Ref<0.0.1.6330>}, <<"replication_ns_1@10.242.238.90">>,<0.24028.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:36.345,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.24030.0>,{#Ref<0.0.1.6319>,<0.24034.0>}} [error_logger:info,2014-08-19T16:50:36.345,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.24034.0>}, {name, {new_child_id, [938,940,942,945,948,949,950,951,952,953,954, 955,956,957,958,959,960,961,962,963,964,965, 966,967,968,969,970,971,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [938,940,942,945,948,949,950,951,952,953, 954,955,956,957,958,959,960,961,962,963, 964,965,966,967,968,969,970,971,972,973, 974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:36.353,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.356,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2638 us [ns_server:debug,2014-08-19T16:50:36.358,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.359,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:36.359,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/687. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:36.359,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",687,active,1} [ns_server:debug,2014-08-19T16:50:36.360,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{940, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:36.360,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 946 state to replica [ns_server:info,2014-08-19T16:50:36.361,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [938,940,942,945,946,948,949,950,951,952,953,954,955,956,957,958,959,960,961, 962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980, 981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023] ([946], []) [ns_server:debug,2014-08-19T16:50:36.361,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[938,940,942,945,948,949,950,951,952,953,954,955,956,957,958,959, 960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:36.361,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24036.0> [ns_server:debug,2014-08-19T16:50:36.362,ns_1@10.242.238.90:<0.24037.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [938,940,942,945,946,948,949,950,951,952,953, 954,955,956,957,958,959,960,961,962,963,964, 965,966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.6484>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[938,940,942,945,946,948,949,950,951,952,953,954,955,956,957, 958,959,960,961,962,963,964,965,966,967,968,969,970,971,972, 973,974,975,976,977,978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] 
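Each entry in this capture begins with a bracketed header (component and severity, timestamp, node, the emitting process, and module:function:line) followed by a free-form message, with consecutive entries run together. The Python sketch below splits such a chunk back into individual records; the header layout and the field names are inferred from the entries above rather than taken from any ns_server documentation.

import re

# Assumed header shape, inferred from the records in this capture:
#   [logger:level,timestamp,node:process:module:function:line]
# where process is either a bare pid like <0.18779.0> or a registered name
# followed by a pid, e.g. ns_config_log<0.17158.0>.
HEADER = re.compile(
    r"\[(?P<logger>[a-z_]+):(?P<level>[a-z]+),"
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+),"
    r"(?P<node>[^:,\]]+):(?P<proc>.*?):"
    r"(?P<module>[A-Za-z0-9_]+):(?P<function>[A-Za-z0-9_]+):(?P<line>\d+)\]"
)

def split_records(text):
    """Split a chunk of captured log text into (header_fields, message) pairs."""
    matches = list(HEADER.finditer(text))
    records = []
    for i, m in enumerate(matches):
        end = matches[i + 1].start() if i + 1 < len(matches) else len(text)
        records.append((m.groupdict(), text[m.end():end].strip()))
    return records

sample = ('[ns_server:debug,2014-08-19T16:50:36.327,ns_1@10.242.238.90:'
          '<0.18779.0>:mc_connection:do_notify_vbucket_update:126]'
          'Signaled mc_couch_event: {set_vbucket,"default",683,active,1}')
for header, message in split_records(sample):
    print(header["module"], header["function"], "->", message)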
[ns_server:debug,2014-08-19T16:50:36.363,ns_1@10.242.238.90:<0.24037.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.24034.0> [ns_server:info,2014-08-19T16:50:36.363,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:36.374,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{938,1}, {940,1}, {942,1}, {945,1}, {946,1}, {948,1}, {949,1}, {950,1}, {951,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:36.374,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:36.375,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:36.375,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:36.375,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:36.375,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:36.375,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.375,ns_1@10.242.238.90:<0.24039.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.375,ns_1@10.242.238.90:<0.24039.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.375,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:36.375,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:36.376,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:36.376,ns_1@10.242.238.90:<0.24034.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:36.376,ns_1@10.242.238.90:<0.24037.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.24034.0> [ns_server:debug,2014-08-19T16:50:36.376,ns_1@10.242.238.90:<0.24037.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:36.376,ns_1@10.242.238.90:<0.24041.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:36.376,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.24034.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.24036.0>,<<"cut off">>,<<"cut off">>,[],244,false,false,0, {1408,452636,375092}, completed, {<0.24037.0>,#Ref<0.0.1.6497>}, <<"replication_ns_1@10.242.238.90">>,<0.24034.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:36.377,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.24037.0>,{#Ref<0.0.1.6486>,<0.24041.0>}} [error_logger:info,2014-08-19T16:50:36.376,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.24041.0>}, {name, {new_child_id, [938,940,942,945,946,948,949,950,951,952,953, 954,955,956,957,958,959,960,961,962,963,964, 965,966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [938,940,942,945,946,948,949,950,951,952, 953,954,955,956,957,958,959,960,961,962, 963,964,965,966,967,968,969,970,971,972, 973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:36.382,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.385,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.385,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3327 us [ns_server:debug,2014-08-19T16:50:36.386,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.386,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{946, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, 
{sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.387,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[938,940,942,945,946,948,949,950,951,952,953,954,955,956,957,958, 959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974, 975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018, 1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:36.388,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24043.0> [views:debug,2014-08-19T16:50:36.392,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/694. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:36.392,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",694,active,1} [ns_server:debug,2014-08-19T16:50:36.410,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.419,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.419,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 9792 us [ns_server:debug,2014-08-19T16:50:36.420,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{435, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.421,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:36.426,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/690. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:36.426,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",690,active,1} [ns_server:debug,2014-08-19T16:50:36.440,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.443,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.443,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3041 us [ns_server:debug,2014-08-19T16:50:36.444,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.444,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{693, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:36.448,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 941 state to replica [ns_server:info,2014-08-19T16:50:36.448,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [938,940,941,942,945,946,948,949,950,951,952,953,954,955,956,957,958,959,960, 961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([941], []) [ns_server:debug,2014-08-19T16:50:36.449,ns_1@10.242.238.90:<0.24046.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [938,940,941,942,945,946,948,949,950,951,952, 953,954,955,956,957,958,959,960,961,962,963, 964,965,966,967,968,969,970,971,972,973,974, 975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.6753>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[938,940,941,942,945,946,948,949,950,951,952,953,954,955,956, 957,958,959,960,961,962,963,964,965,966,967,968,969,970,971, 972,973,974,975,976,977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] 
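The tap_replication_manager record above reports the new vbucket list for the replication stream from 'ns_1@10.242.238.91' and appends what looks like the delta against the previous filter, here ([941], []), i.e. vbucket 941 added and nothing removed. If that reading is right, the delta is simply a set difference between the old and new filters, as in this small sketch (plain Python, not ns_server code):

def filter_diff(old_vbuckets, new_vbuckets):
    """Return (added, removed) vbucket ids, each sorted ascending."""
    old, new = set(old_vbuckets), set(new_vbuckets)
    return sorted(new - old), sorted(old - new)

# The record above grows the filter by vbucket 941 and drops nothing,
# which the log prints as "([941], [])".
previous = [938, 940, 942, 945, 946] + list(range(948, 1024))
requested = sorted(previous + [941])
print(filter_diff(previous, requested))   # ([941], [])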
[ns_server:debug,2014-08-19T16:50:36.450,ns_1@10.242.238.90:<0.24046.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.24041.0> [ns_server:info,2014-08-19T16:50:36.450,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:36.461,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{938,1}, {940,1}, {941,1}, {942,1}, {945,1}, {946,1}, {948,1}, {949,1}, {950,1}, {951,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:36.462,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:36.462,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:36.462,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:36.462,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:36.462,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:36.463,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.463,ns_1@10.242.238.90:<0.24049.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.463,ns_1@10.242.238.90:<0.24049.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.463,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:36.463,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:36.463,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:36.463,ns_1@10.242.238.90:<0.24041.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:36.463,ns_1@10.242.238.90:<0.24046.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.24041.0> [ns_server:debug,2014-08-19T16:50:36.464,ns_1@10.242.238.90:<0.24046.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:36.464,ns_1@10.242.238.90:<0.24051.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:36.464,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.24041.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.24043.0>,<<"cut off">>,<<"cut off">>,[],247,false,false,0, {1408,452636,462586}, completed, {<0.24046.0>,#Ref<0.0.1.6766>}, <<"replication_ns_1@10.242.238.90">>,<0.24041.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:36.464,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.24046.0>,{#Ref<0.0.1.6755>,<0.24051.0>}} [error_logger:info,2014-08-19T16:50:36.464,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.24051.0>}, {name, {new_child_id, [938,940,941,942,945,946,948,949,950,951,952, 953,954,955,956,957,958,959,960,961,962,963, 964,965,966,967,968,969,970,971,972,973,974, 975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [938,940,941,942,945,946,948,949,950,951, 952,953,954,955,956,957,958,959,960,961, 962,963,964,965,966,967,968,969,970,971, 972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991, 992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008, 1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:36.468,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.472,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3366 us [ns_server:debug,2014-08-19T16:50:36.473,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.473,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.474,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{941, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, 
{sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.478,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[938,940,941,942,945,946,948,949,950,951,952,953,954,955,956,957, 958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973, 974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017, 1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:36.478,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24052.0> [ns_server:debug,2014-08-19T16:50:36.500,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.503,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.503,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2994 us [ns_server:debug,2014-08-19T16:50:36.504,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.504,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{683, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:36.508,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 944 state to replica [ns_server:info,2014-08-19T16:50:36.508,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [938,940,941,942,944,945,946,948,949,950,951,952,953,954,955,956,957,958,959, 960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([944], []) [ns_server:debug,2014-08-19T16:50:36.509,ns_1@10.242.238.90:<0.24054.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [938,940,941,942,944,945,946,948,949,950,951, 952,953,954,955,956,957,958,959,960,961,962, 963,964,965,966,967,968,969,970,971,972,973, 974,975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 
1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.6918>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[938,940,941,942,944,945,946,948,949,950,951,952,953,954,955, 956,957,958,959,960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:36.510,ns_1@10.242.238.90:<0.24054.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.24051.0> [ns_server:info,2014-08-19T16:50:36.510,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:36.521,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{938,1}, {940,1}, {941,1}, {942,1}, {944,1}, {945,1}, {946,1}, {948,1}, {949,1}, {950,1}, {951,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:36.522,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:36.522,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:36.522,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
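The "Changing vbucket filter on tap stream" records print the requested filter as Erlang tuples of the form {VBucketId, N}. The second element is not labelled in the log and reads like a per-vbucket checkpoint or sequence value, so the sketch below simply extracts the pairs as printed, without interpreting them:

import re

PAIR = re.compile(r"\{(\d+),(\d+)\}")

def parse_filter_pairs(message):
    """Extract the {VBucketId, N} tuples from a
    'Changing vbucket filter on tap stream' message."""
    return [(int(vb), int(n)) for vb, n in PAIR.findall(message)]

msg = ("Changing vbucket filter on tap stream "
       "`replication_ns_1@10.242.238.90`: "
       "[{938,1}, {940,1}, {941,1}, {942,1}, {944,1}, {945,1}]")
print(parse_filter_pairs(msg)[:4])   # [(938, 1), (940, 1), (941, 1), (942, 1)]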
[ns_server:debug,2014-08-19T16:50:36.522,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:36.523,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:36.523,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.523,ns_1@10.242.238.90:<0.24056.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.523,ns_1@10.242.238.90:<0.24056.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.523,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:36.523,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:36.523,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:36.523,ns_1@10.242.238.90:<0.24051.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:36.523,ns_1@10.242.238.90:<0.24054.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.24051.0> [ns_server:debug,2014-08-19T16:50:36.524,ns_1@10.242.238.90:<0.24054.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:36.524,ns_1@10.242.238.90:<0.24058.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:36.524,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.24051.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.24052.0>,<<"cut off">>,<<"cut off">>,[],250,false,false,0, {1408,452636,522672}, completed, {<0.24054.0>,#Ref<0.0.1.6931>}, <<"replication_ns_1@10.242.238.90">>,<0.24051.0>, {had_backfill,false,undefined,[]}, completed,false}. 
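Each filter change follows the same handoff: the old ebucketmigrator silences its upstream sender, confirms the downstream with an opaque message, hands its state to the caller, and dies, after which the supervisor starts the replacement with that state. In this capture a full handoff takes roughly 13 to 18 ms, for example 16:50:36.330 to 16:50:36.344 for the first cycle. A rough way to measure that from the raw log text, assuming the handoffs never overlap (as is the case here), is sketched below in Python:

import re
from datetime import datetime

TS = r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+"
START = re.compile(r"\[ns_server:info,(" + TS + r"),[^\]]*\]"
                   r"Starting new-style vbucket filter change")
DONE = re.compile(r"\[ns_server:debug,(" + TS + r"),[^\]]*\]Sent out state")

def handoff_durations_ms(log_text):
    """Pair each 'Starting new-style vbucket filter change' record with the
    next 'Sent out state' record and return the elapsed milliseconds for each
    handoff (assumes the handoffs do not overlap, as in the records above)."""
    def ts(value):
        return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f")
    starts = [ts(m.group(1)) for m in START.finditer(log_text)]
    done = [ts(m.group(1)) for m in DONE.finditer(log_text)]
    return [round((d - s).total_seconds() * 1000, 1)
            for s, d in zip(starts, done)]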
[ns_server:debug,2014-08-19T16:50:36.524,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.24054.0>,{#Ref<0.0.1.6920>,<0.24058.0>}} [error_logger:info,2014-08-19T16:50:36.524,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.24058.0>}, {name, {new_child_id, [938,940,941,942,944,945,946,948,949,950,951, 952,953,954,955,956,957,958,959,960,961,962, 963,964,965,966,967,968,969,970,971,972,973, 974,975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [938,940,941,942,944,945,946,948,949,950, 951,952,953,954,955,956,957,958,959,960, 961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980, 981,982,983,984,985,986,987,988,989,990, 991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:36.529,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.532,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3077 us [ns_server:debug,2014-08-19T16:50:36.532,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.533,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.533,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{944, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.536,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[938,940,941,942,944,945,946,948,949,950,951,952,953,954,955,956, 957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972, 973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016, 1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] 
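Interleaved with the rebalance traffic, ns_config_rep keeps answering full synchronization requests from 'ns_1@10.242.238.88' and logs how long each one took; in this excerpt most complete in roughly 2.5 to 3.5 ms, with occasional spikes near 10 ms (for example 9792 us). A quick way to pull those figures out of a captured log, sketched in Python:

import re
from statistics import mean

SYNC = re.compile(r"Fully synchronized config in (\d+) us")

def config_sync_summary(log_text):
    """Summarise the 'Fully synchronized config in N us' records in a capture."""
    times_us = [int(m.group(1)) for m in SYNC.finditer(log_text)]
    if not times_us:
        return None
    return {"count": len(times_us),
            "mean_us": round(mean(times_us)),
            "max_us": max(times_us)}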
[rebalance:debug,2014-08-19T16:50:36.536,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24060.0> [ns_server:debug,2014-08-19T16:50:36.562,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.565,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2701 us [ns_server:debug,2014-08-19T16:50:36.565,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.566,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.566,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{695, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:36.570,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 939 state to replica [ns_server:info,2014-08-19T16:50:36.570,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [938,939,940,941,942,944,945,946,948,949,950,951,952,953,954,955,956,957,958, 959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996, 997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([939], []) [ns_server:debug,2014-08-19T16:50:36.571,ns_1@10.242.238.90:<0.24062.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [938,939,940,941,942,944,945,946,948,949,950, 951,952,953,954,955,956,957,958,959,960,961, 962,963,964,965,966,967,968,969,970,971,972, 973,974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.7086>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[938,939,940,941,942,944,945,946,948,949,950,951,952,953,954, 955,956,957,958,959,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980,981,982,983,984, 985,986,987,988,989,990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:36.572,ns_1@10.242.238.90:<0.24062.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator 
<0.24058.0> [ns_server:info,2014-08-19T16:50:36.572,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:36.584,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{938,1}, {939,1}, {940,1}, {941,1}, {942,1}, {944,1}, {945,1}, {946,1}, {948,1}, {949,1}, {950,1}, {951,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:36.585,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:36.585,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:36.585,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:36.585,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:36.586,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:36.586,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.586,ns_1@10.242.238.90:<0.24065.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.586,ns_1@10.242.238.90:<0.24065.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.586,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:36.586,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:36.586,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:36.586,ns_1@10.242.238.90:<0.24058.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:36.586,ns_1@10.242.238.90:<0.24062.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.24058.0> [ns_server:debug,2014-08-19T16:50:36.587,ns_1@10.242.238.90:<0.24062.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:36.587,ns_1@10.242.238.90:<0.24067.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:36.587,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.24058.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.24060.0>,<<"cut off">>,<<"cut off">>,[],253,false,false,0, {1408,452636,585642}, completed, {<0.24062.0>,#Ref<0.0.1.7099>}, <<"replication_ns_1@10.242.238.90">>,<0.24058.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:36.587,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.24062.0>,{#Ref<0.0.1.7088>,<0.24067.0>}} [error_logger:info,2014-08-19T16:50:36.587,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.24067.0>}, {name, {new_child_id, [938,939,940,941,942,944,945,946,948,949,950, 951,952,953,954,955,956,957,958,959,960,961, 962,963,964,965,966,967,968,969,970,971,972, 973,974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004, 1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [938,939,940,941,942,944,945,946,948,949, 950,951,952,953,954,955,956,957,958,959, 960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979, 980,981,982,983,984,985,986,987,988,989, 990,991,992,993,994,995,996,997,998,999, 1000,1001,1002,1003,1004,1005,1006,1007, 1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:36.592,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.600,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8455 us [ns_server:debug,2014-08-19T16:50:36.601,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.601,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.602,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{939, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, 
{uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.603,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[938,939,940,941,942,944,945,946,948,949,950,951,952,953,954,955, 956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971, 972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015, 1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:36.603,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24068.0> [ns_server:debug,2014-08-19T16:50:36.623,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.626,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2957 us [ns_server:debug,2014-08-19T16:50:36.626,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.626,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.627,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{429, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.647,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.649,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.649,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2596 us [ns_server:debug,2014-08-19T16:50:36.650,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.651,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{436, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, 
{servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.672,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.675,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.675,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2857 us [ns_server:debug,2014-08-19T16:50:36.675,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.676,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{689, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.696,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.699,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2921 us [ns_server:debug,2014-08-19T16:50:36.699,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.699,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.700,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{694, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.723,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.726,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.726,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2640 us [ns_server:debug,2014-08-19T16:50:36.726,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.727,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> 
[{configs,[{"default", [{map,[{430, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.749,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.757,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8075 us [ns_server:debug,2014-08-19T16:50:36.757,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.758,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.758,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{437, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.778,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.781,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2912 us [ns_server:debug,2014-08-19T16:50:36.781,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.782,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.782,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{433, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.805,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.807,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2564 us 
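The "config change: buckets" records during this stretch each carry a one-entry map diff of the form {VBucket, [FirstChain], [SecondChain]}, for example vbucket 430 going from a chain headed by 'ns_1@10.242.238.88' to ['ns_1@10.242.238.89','ns_1@10.242.238.91']. Reading the two lists as the old and new replication chains is an inference from the surrounding rebalance activity, not something the record states. A small Python sketch that collects these triples from the raw text:

import re

# Matches the single-entry map diffs printed by the 'config change: buckets'
# records above: {map,[{VBucket, [FirstChain], [SecondChain]}]}
MAP_ENTRY = re.compile(r"\{map,\[\{(\d+),\s*\[([^\]]*)\],\s*\[([^\]]*)\]\}\]\}")

def parse_map_changes(log_text):
    """Return (vbucket, first_chain, second_chain) triples from the map diffs.
    Treating the two lists as the old and new server chains is an inference,
    not something the record itself states."""
    def chain(raw):
        return [item.strip().strip("'") for item in raw.split(",")]
    return [(int(vb), chain(a), chain(b))
            for vb, a, b in MAP_ENTRY.findall(log_text)]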
[ns_server:debug,2014-08-19T16:50:36.807,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.808,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.808,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{690, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:36.811,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 943 state to replica [ns_server:info,2014-08-19T16:50:36.811,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [938,939,940,941,942,943,944,945,946,948,949,950,951,952,953,954,955,956,957, 958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976, 977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995, 996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([943], []) [ns_server:debug,2014-08-19T16:50:36.819,ns_1@10.242.238.90:<0.24075.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [938,939,940,941,942,943,944,945,946,948,949, 950,951,952,953,954,955,956,957,958,959,960, 961,962,963,964,965,966,967,968,969,970,971, 972,973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.7440>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[938,939,940,941,942,943,944,945,946,948,949,950,951,952,953, 954,955,956,957,958,959,960,961,962,963,964,965,966,967,968, 969,970,971,972,973,974,975,976,977,978,979,980,981,982,983, 984,985,986,987,988,989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:36.819,ns_1@10.242.238.90:<0.24075.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.24067.0> [ns_server:info,2014-08-19T16:50:36.820,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:36.836,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{938,1}, {939,1}, {940,1}, {941,1}, {942,1}, {943,1}, {944,1}, {945,1}, 
{946,1}, {948,1}, {949,1}, {950,1}, {951,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, {990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:36.837,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:36.837,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:36.837,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:36.837,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:36.837,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:36.837,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.837,ns_1@10.242.238.90:<0.24078.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.837,ns_1@10.242.238.90:<0.24078.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.838,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:36.838,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:36.838,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:36.838,ns_1@10.242.238.90:<0.24067.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:36.838,ns_1@10.242.238.90:<0.24075.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.24067.0> [ns_server:debug,2014-08-19T16:50:36.838,ns_1@10.242.238.90:<0.24075.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:36.838,ns_1@10.242.238.90:<0.24080.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:36.839,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.24067.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.24068.0>,<<"cut off">>,<<"cut off">>,[],256,false,false,0, {1408,452636,837312}, completed, {<0.24075.0>,#Ref<0.0.1.7455>}, <<"replication_ns_1@10.242.238.90">>,<0.24067.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:36.839,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.24075.0>,{#Ref<0.0.1.7442>,<0.24080.0>}} [error_logger:info,2014-08-19T16:50:36.839,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.24080.0>}, {name, {new_child_id, [938,939,940,941,942,943,944,945,946,948,949, 950,951,952,953,954,955,956,957,958,959,960, 961,962,963,964,965,966,967,968,969,970,971, 972,973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992,993, 994,995,996,997,998,999,1000,1001,1002,1003, 1004,1005,1006,1007,1008,1009,1010,1011,1012, 1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [938,939,940,941,942,943,944,945,946,948, 949,950,951,952,953,954,955,956,957,958, 959,960,961,962,963,964,965,966,967,968, 969,970,971,972,973,974,975,976,977,978, 979,980,981,982,983,984,985,986,987,988, 989,990,991,992,993,994,995,996,997,998, 999,1000,1001,1002,1003,1004,1005,1006, 1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022, 1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:36.844,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.847,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.848,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3602 us [ns_server:debug,2014-08-19T16:50:36.848,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.849,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{943, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:36.851,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 947 state to replica 
[ns_server:info,2014-08-19T16:50:36.851,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.91' to have [938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956, 957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] ([947], []) [ns_server:debug,2014-08-19T16:50:36.852,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[938,939,940,941,942,943,944,945,946,948,949,950,951,952,953,954, 955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986, 987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:debug,2014-08-19T16:50:36.852,ns_1@10.242.238.90:<0.24081.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [938,939,940,941,942,943,944,945,946,947,948, 949,950,951,952,953,954,955,956,957,958,959, 960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}, #Ref<0.0.1.7567>} Args:[{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[938,939,940,941,942,943,944,945,946,947,948,949,950,951,952, 953,954,955,956,957,958,959,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977,978,979,980,981,982, 983,984,985,986,987,988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009, 1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:36.861,ns_1@10.242.238.90:<0.24081.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.24080.0> [rebalance:debug,2014-08-19T16:50:36.861,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24083.0> [ns_server:info,2014-08-19T16:50:36.862,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:36.874,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{938,1}, {939,1}, {940,1}, {941,1}, {942,1}, {943,1}, {944,1}, {945,1}, {946,1}, {947,1}, {948,1}, {949,1}, {950,1}, {951,1}, {952,1}, {953,1}, {954,1}, {955,1}, {956,1}, {957,1}, {958,1}, {959,1}, {960,1}, {961,1}, {962,1}, {963,1}, {964,1}, {965,1}, {966,1}, {967,1}, {968,1}, {969,1}, {970,1}, {971,1}, {972,1}, {973,1}, {974,1}, {975,1}, {976,1}, {977,1}, {978,1}, {979,1}, {980,1}, {981,1}, {982,1}, {983,1}, {984,1}, {985,1}, {986,1}, {987,1}, {988,1}, {989,1}, 
{990,1}, {991,1}, {992,1}, {993,1}, {994,1}, {995,1}, {996,1}, {997,1}, {998,1}, {999,1}, {1000,1}, {1001,1}, {1002,1}, {1003,1}, {1004,1}, {1005,1}, {1006,1}, {1007,1}, {1008,1}, {1009,1}, {1010,1}, {1011,1}, {1012,1}, {1013,1}, {1014,1}, {1015,1}, {1016,1}, {1017,1}, {1018,1}, {1019,1}, {1020,1}, {1021,1}, {1022,1}, {1023,1}] [ns_server:info,2014-08-19T16:50:36.875,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:36.875,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:36.875,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:36.875,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:36.875,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:36.875,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:36.875,ns_1@10.242.238.90:<0.24085.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:36.875,ns_1@10.242.238.90:<0.24085.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:36.876,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:36.876,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:36.876,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:36.876,ns_1@10.242.238.90:<0.24080.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:36.876,ns_1@10.242.238.90:<0.24081.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.24080.0> [ns_server:debug,2014-08-19T16:50:36.876,ns_1@10.242.238.90:<0.24081.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:36.876,ns_1@10.242.238.90:<0.24087.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:36.877,ns_1@10.242.238.90:<0.24087.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.24080.0>: {state,#Port<0.13886>,#Port<0.13883>,#Port<0.13887>,#Port<0.13884>, <0.24083.0>,<<"cut off">>,<<"cut off">>,[],259,false,false,0, {1408,452636,875407}, completed, {<0.24081.0>,#Ref<0.0.1.7589>}, <<"replication_ns_1@10.242.238.90">>,<0.24080.0>, {had_backfill,false,undefined,[]}, completed,false}. 
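The sequence above is one complete "new-style vbucket filter change": a new ebucketmigrator registers under ns_vbm_new_sup-default, the old process changes the tap filter, confirms the downstream, hands its state to the replacement, and dies. As an illustrative cross-check only (not Couchbase code), this sketch pairs the start and success messages per tap stream so an unmatched start would stand out; the regexes follow the exact messages shown in this log.

# Illustrative only: verify every "Starting new-style vbucket filter change" is
# matched by a "Successfully changed vbucket filter" on the same tap stream.
import re
from collections import Counter

START_RE = re.compile(r"Starting new-style vbucket filter change on stream `([^`]+)`")
DONE_RE  = re.compile(r"Successfully changed vbucket filter on tap stream `([^`]+)`")

def unmatched_filter_changes(text):
    started = Counter(START_RE.findall(text))
    finished = Counter(DONE_RE.findall(text))
    # Streams where starts outnumber successful completions.
    return {s: n - finished.get(s, 0)
            for s, n in started.items() if n > finished.get(s, 0)}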
[ns_server:debug,2014-08-19T16:50:36.877,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.24081.0>,{#Ref<0.0.1.7570>,<0.24087.0>}} [error_logger:info,2014-08-19T16:50:36.877,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.24087.0>}, {name, {new_child_id, [938,939,940,941,942,943,944,945,946,947,948, 949,950,951,952,953,954,955,956,957,958,959, 960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [938,939,940,941,942,943,944,945,946,947, 948,949,950,951,952,953,954,955,956,957, 958,959,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:36.882,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.885,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3108 us [ns_server:debug,2014-08-19T16:50:36.885,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.886,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.887,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{947, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.888,ns_1@10.242.238.90:<0.24087.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953, 954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] 
[rebalance:debug,2014-08-19T16:50:36.888,ns_1@10.242.238.90:<0.24087.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24088.0> [ns_server:debug,2014-08-19T16:50:36.912,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.915,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.915,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2622 us [ns_server:debug,2014-08-19T16:50:36.915,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.916,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{431, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.935,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.943,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7444 us [ns_server:debug,2014-08-19T16:50:36.943,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.943,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.944,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{427, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.974,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:36.977,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.977,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3020 us [ns_server:debug,2014-08-19T16:50:36.977,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.978,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> 
[{configs,[{"default", [{map,[{687, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:37.048,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 682 state to replica [ns_server:info,2014-08-19T16:50:37.053,ns_1@10.242.238.90:<0.24093.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 682 to state replica [ns_server:debug,2014-08-19T16:50:37.094,ns_1@10.242.238.90:<0.24093.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_682_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:37.096,ns_1@10.242.238.90:<0.24093.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[682]}, {checkpoints,[{682,0}]}, {name,<<"replication_building_682_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[682]}, {takeover,false}, {suffix,"building_682_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",682,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:37.097,ns_1@10.242.238.90:<0.24093.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24094.0> [rebalance:debug,2014-08-19T16:50:37.097,ns_1@10.242.238.90:<0.24093.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:37.097,ns_1@10.242.238.90:<0.24093.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.9868.1>,#Ref<16550.0.1.157426>}]} [rebalance:info,2014-08-19T16:50:37.097,ns_1@10.242.238.90:<0.24093.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 682 [rebalance:debug,2014-08-19T16:50:37.098,ns_1@10.242.238.90:<0.24093.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.9868.1>,#Ref<16550.0.1.157426>}] [ns_server:debug,2014-08-19T16:50:37.098,ns_1@10.242.238.90:<0.24093.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:37.099,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24095.0> (ok) [rebalance:debug,2014-08-19T16:50:37.100,ns_1@10.242.238.90:<0.24096.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 682 [ns_server:info,2014-08-19T16:50:37.105,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 426 state to replica [ns_server:info,2014-08-19T16:50:37.109,ns_1@10.242.238.90:<0.24099.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 426 to state replica [ns_server:debug,2014-08-19T16:50:37.136,ns_1@10.242.238.90:<0.24099.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_426_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:37.138,ns_1@10.242.238.90:<0.24099.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[426]}, {checkpoints,[{426,0}]}, {name,<<"replication_building_426_'ns_1@10.242.238.90'">>}, {takeover,false}] 
{{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[426]}, {takeover,false}, {suffix,"building_426_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",426,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:37.138,ns_1@10.242.238.90:<0.24099.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24114.0> [rebalance:debug,2014-08-19T16:50:37.139,ns_1@10.242.238.90:<0.24099.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:37.139,ns_1@10.242.238.90:<0.24099.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.9895.1>,#Ref<16550.0.1.157562>}]} [rebalance:info,2014-08-19T16:50:37.139,ns_1@10.242.238.90:<0.24099.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 426 [rebalance:debug,2014-08-19T16:50:37.140,ns_1@10.242.238.90:<0.24099.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.9895.1>,#Ref<16550.0.1.157562>}] [ns_server:debug,2014-08-19T16:50:37.140,ns_1@10.242.238.90:<0.24099.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:37.153,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 682. Nacking mccouch update. [views:debug,2014-08-19T16:50:37.153,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/682. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:37.153,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",682,replica,0} [ns_server:debug,2014-08-19T16:50:37.153,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,971,724,990,958,743,711,1022,977,945,762,730,698,1009, 996,964,749,717,685,983,951,736,704,1015,970,938,755,723,691,1002,989,957, 742,710,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767,735, 703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728,696,1007, 994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000,987,955, 740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948,765,733, 701,1012,999,967,752,720,688,986,954,739,707,1018,973,941,758,726,694,1005, 992,960,745,713,979,947,764,732,700,1011,998,966,751,719,687,985,953,738,706, 1017,972,940,757,725,693,1004,991,959,744,712,1023,978,946,763,731,699,1010, 965,718,952,705,1016,939,756,692,1003] [rebalance:debug,2014-08-19T16:50:37.155,ns_1@10.242.238.90:<0.24115.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 426 [views:debug,2014-08-19T16:50:37.186,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/682. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:37.186,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",682,replica,0} [ns_server:info,2014-08-19T16:50:37.215,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 681 state to replica [views:debug,2014-08-19T16:50:37.220,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/682. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:37.220,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",682,pending,0} [ns_server:info,2014-08-19T16:50:37.221,ns_1@10.242.238.90:<0.24118.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 681 to state replica [ns_server:debug,2014-08-19T16:50:37.260,ns_1@10.242.238.90:<0.24118.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_681_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:37.261,ns_1@10.242.238.90:<0.24118.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[681]}, {checkpoints,[{681,0}]}, {name,<<"replication_building_681_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[681]}, {takeover,false}, {suffix,"building_681_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",681,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:37.262,ns_1@10.242.238.90:<0.24118.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24133.0> [rebalance:debug,2014-08-19T16:50:37.262,ns_1@10.242.238.90:<0.24118.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:37.263,ns_1@10.242.238.90:<0.24118.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.9950.1>,#Ref<16550.0.1.157801>}]} [rebalance:info,2014-08-19T16:50:37.263,ns_1@10.242.238.90:<0.24118.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 681 [rebalance:debug,2014-08-19T16:50:37.263,ns_1@10.242.238.90:<0.24118.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.9950.1>,#Ref<16550.0.1.157801>}] [ns_server:debug,2014-08-19T16:50:37.264,ns_1@10.242.238.90:<0.24118.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:37.264,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24134.0> (ok) [rebalance:debug,2014-08-19T16:50:37.265,ns_1@10.242.238.90:<0.24135.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 681 [ns_server:info,2014-08-19T16:50:37.271,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 425 state to replica [ns_server:info,2014-08-19T16:50:37.275,ns_1@10.242.238.90:<0.24138.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 425 to state replica [ns_server:debug,2014-08-19T16:50:37.296,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 426. Nacking mccouch update. [views:debug,2014-08-19T16:50:37.296,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/426. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:37.296,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",426,replica,0} [ns_server:debug,2014-08-19T16:50:37.297,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,990,958,743,711,1022,977,945,762,730,698, 1009,996,964,749,717,685,983,951,736,704,1015,970,938,755,723,691,1002,989, 957,742,710,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767, 735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728,696, 1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000,987, 955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948,765, 733,701,1012,999,967,752,720,688,986,954,739,707,1018,973,941,758,726,694, 1005,992,960,745,713,979,947,764,732,700,1011,998,966,751,719,687,985,953, 738,706,1017,972,940,757,725,693,1004,991,959,744,712,1023,978,946,763,731, 699,1010,965,718,952,705,1016,939,756,692,1003] [ns_server:debug,2014-08-19T16:50:37.304,ns_1@10.242.238.90:<0.24138.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_425_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:37.305,ns_1@10.242.238.90:<0.24138.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[425]}, {checkpoints,[{425,0}]}, {name,<<"replication_building_425_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[425]}, {takeover,false}, {suffix,"building_425_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",425,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:37.306,ns_1@10.242.238.90:<0.24138.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24139.0> [rebalance:debug,2014-08-19T16:50:37.306,ns_1@10.242.238.90:<0.24138.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:37.307,ns_1@10.242.238.90:<0.24138.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.9972.1>,#Ref<16550.0.1.157917>}]} [rebalance:info,2014-08-19T16:50:37.307,ns_1@10.242.238.90:<0.24138.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 425 [rebalance:debug,2014-08-19T16:50:37.307,ns_1@10.242.238.90:<0.24138.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.9972.1>,#Ref<16550.0.1.157917>}] [ns_server:debug,2014-08-19T16:50:37.308,ns_1@10.242.238.90:<0.24138.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:37.322,ns_1@10.242.238.90:<0.24140.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 425 [views:debug,2014-08-19T16:50:37.363,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/426. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:37.363,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",426,replica,0} [rebalance:debug,2014-08-19T16:50:37.364,ns_1@10.242.238.90:<0.24115.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:37.364,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24115.0> (ok) [rebalance:debug,2014-08-19T16:50:37.366,ns_1@10.242.238.90:<0.24143.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 426 [ns_server:info,2014-08-19T16:50:37.383,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 680 state to replica [ns_server:info,2014-08-19T16:50:37.391,ns_1@10.242.238.90:<0.24152.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 680 to state replica [ns_server:debug,2014-08-19T16:50:37.430,ns_1@10.242.238.90:<0.24152.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_680_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:37.431,ns_1@10.242.238.90:<0.24152.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[680]}, {checkpoints,[{680,0}]}, {name,<<"replication_building_680_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[680]}, {takeover,false}, {suffix,"building_680_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",680,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:37.432,ns_1@10.242.238.90:<0.24152.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24153.0> [rebalance:debug,2014-08-19T16:50:37.432,ns_1@10.242.238.90:<0.24152.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:37.432,ns_1@10.242.238.90:<0.24152.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10017.1>,#Ref<16550.0.1.158165>}]} [rebalance:info,2014-08-19T16:50:37.432,ns_1@10.242.238.90:<0.24152.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 680 [rebalance:debug,2014-08-19T16:50:37.433,ns_1@10.242.238.90:<0.24152.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10017.1>,#Ref<16550.0.1.158165>}] [ns_server:debug,2014-08-19T16:50:37.433,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24154.0> (ok) [ns_server:debug,2014-08-19T16:50:37.434,ns_1@10.242.238.90:<0.24152.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:37.435,ns_1@10.242.238.90:<0.24155.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 680 [ns_server:info,2014-08-19T16:50:37.440,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 424 state to replica [ns_server:info,2014-08-19T16:50:37.444,ns_1@10.242.238.90:<0.24158.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 424 to state replica [ns_server:debug,2014-08-19T16:50:37.471,ns_1@10.242.238.90:<0.24158.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_424_'ns_1@10.242.238.90' 
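From here on the log interleaves the per-vbucket replica-building steps: "Changed vbucket N state to replica", a "replication_building_N_..." tap stream, and "Going to wait for persistence of checkpoint 1 in vbucket N". The sketch below is illustrative only (it assumes the log text is already loaded into a string) and simply collects which of those three markers appear for each vbucket.

# Illustrative only: per-vbucket progress of the replica-building steps visible
# in this log. Regexes mirror the message formats shown above.
import re

STATE_RE   = re.compile(r"Changed vbucket (\d+) state to replica")
STREAM_RE  = re.compile(r"replication_building_(\d+)_")
PERSIST_RE = re.compile(r"wait for persistence of checkpoint 1 in vbucket (\d+)")

def replica_build_progress(text):
    steps = {"state": STATE_RE, "stream": STREAM_RE, "persist": PERSIST_RE}
    seen = {name: set(rx.findall(text)) for name, rx in steps.items()}
    all_vbuckets = set().union(*seen.values())
    return {vb: {name: vb in found for name, found in seen.items()}
            for vb in sorted(all_vbuckets, key=int)}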
[rebalance:info,2014-08-19T16:50:37.472,ns_1@10.242.238.90:<0.24158.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[424]}, {checkpoints,[{424,0}]}, {name,<<"replication_building_424_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[424]}, {takeover,false}, {suffix,"building_424_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",424,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:37.473,ns_1@10.242.238.90:<0.24158.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24159.0> [rebalance:debug,2014-08-19T16:50:37.473,ns_1@10.242.238.90:<0.24158.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:37.474,ns_1@10.242.238.90:<0.24158.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10053.1>,#Ref<16550.0.1.158309>}]} [rebalance:info,2014-08-19T16:50:37.474,ns_1@10.242.238.90:<0.24158.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 424 [rebalance:debug,2014-08-19T16:50:37.474,ns_1@10.242.238.90:<0.24158.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10053.1>,#Ref<16550.0.1.158309>}] [ns_server:debug,2014-08-19T16:50:37.475,ns_1@10.242.238.90:<0.24158.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:37.491,ns_1@10.242.238.90:<0.24174.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 424 [ns_server:debug,2014-08-19T16:50:37.547,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 681. Nacking mccouch update. [views:debug,2014-08-19T16:50:37.547,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/681. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:37.547,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",681,pending,0} [ns_server:debug,2014-08-19T16:50:37.547,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,990,958,743,711,1022,977,945,762,730,698, 1009,996,964,749,717,685,983,951,736,704,1015,970,938,755,723,691,1002,989, 957,742,710,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767, 735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728,696, 1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000,987, 955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948,765, 733,701,1012,999,967,752,720,688,986,954,739,707,1018,973,941,758,726,694, 1005,992,960,745,713,681,979,947,764,732,700,1011,998,966,751,719,687,985, 953,738,706,1017,972,940,757,725,693,1004,991,959,744,712,1023,978,946,763, 731,699,1010,965,718,952,705,1016,939,756,692,1003] [ns_server:info,2014-08-19T16:50:37.551,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 679 state to replica [ns_server:info,2014-08-19T16:50:37.558,ns_1@10.242.238.90:<0.24177.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 679 to state replica [ns_server:debug,2014-08-19T16:50:37.604,ns_1@10.242.238.90:<0.24177.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_679_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:37.605,ns_1@10.242.238.90:<0.24177.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[679]}, {checkpoints,[{679,0}]}, {name,<<"replication_building_679_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[679]}, {takeover,false}, {suffix,"building_679_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",679,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:37.606,ns_1@10.242.238.90:<0.24177.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24178.0> [rebalance:debug,2014-08-19T16:50:37.606,ns_1@10.242.238.90:<0.24177.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:37.607,ns_1@10.242.238.90:<0.24177.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10094.1>,#Ref<16550.0.1.158522>}]} [rebalance:info,2014-08-19T16:50:37.607,ns_1@10.242.238.90:<0.24177.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 679 [rebalance:debug,2014-08-19T16:50:37.607,ns_1@10.242.238.90:<0.24177.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10094.1>,#Ref<16550.0.1.158522>}] [ns_server:debug,2014-08-19T16:50:37.608,ns_1@10.242.238.90:<0.24177.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:37.608,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24179.0> (ok) [rebalance:debug,2014-08-19T16:50:37.610,ns_1@10.242.238.90:<0.24180.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 679 [views:debug,2014-08-19T16:50:37.614,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got 
set_vbucket event for default/681. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:37.614,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",681,pending,0} [ns_server:info,2014-08-19T16:50:37.615,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 423 state to replica [ns_server:info,2014-08-19T16:50:37.619,ns_1@10.242.238.90:<0.24183.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 423 to state replica [ns_server:debug,2014-08-19T16:50:37.647,ns_1@10.242.238.90:<0.24183.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_423_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:37.648,ns_1@10.242.238.90:<0.24183.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[423]}, {checkpoints,[{423,0}]}, {name,<<"replication_building_423_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[423]}, {takeover,false}, {suffix,"building_423_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",423,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:37.649,ns_1@10.242.238.90:<0.24183.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24184.0> [rebalance:debug,2014-08-19T16:50:37.649,ns_1@10.242.238.90:<0.24183.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:37.649,ns_1@10.242.238.90:<0.24183.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10116.1>,#Ref<16550.0.1.159696>}]} [rebalance:info,2014-08-19T16:50:37.650,ns_1@10.242.238.90:<0.24183.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 423 [rebalance:debug,2014-08-19T16:50:37.650,ns_1@10.242.238.90:<0.24183.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10116.1>,#Ref<16550.0.1.159696>}] [ns_server:debug,2014-08-19T16:50:37.651,ns_1@10.242.238.90:<0.24183.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:37.665,ns_1@10.242.238.90:<0.24185.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 423 [ns_server:info,2014-08-19T16:50:37.727,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 678 state to replica [ns_server:info,2014-08-19T16:50:37.732,ns_1@10.242.238.90:<0.24203.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 678 to state replica [ns_server:debug,2014-08-19T16:50:37.756,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 425. Nacking mccouch update. [views:debug,2014-08-19T16:50:37.756,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/425. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:37.756,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",425,replica,0} [ns_server:debug,2014-08-19T16:50:37.756,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,990,958,743,711,1022,977,945,762,730,698, 1009,996,964,749,717,685,983,951,736,704,425,1015,970,938,755,723,691,1002, 989,957,742,710,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950, 767,735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728, 696,1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000, 987,955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948, 765,733,701,1012,999,967,752,720,688,986,954,739,707,1018,973,941,758,726, 694,1005,992,960,745,713,681,979,947,764,732,700,1011,998,966,751,719,687, 985,953,738,706,1017,972,940,757,725,693,1004,991,959,744,712,1023,978,946, 763,731,699,1010,965,718,952,705,1016,939,756,692,1003] [ns_server:debug,2014-08-19T16:50:37.772,ns_1@10.242.238.90:<0.24203.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_678_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:37.774,ns_1@10.242.238.90:<0.24203.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[678]}, {checkpoints,[{678,0}]}, {name,<<"replication_building_678_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[678]}, {takeover,false}, {suffix,"building_678_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",678,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:37.774,ns_1@10.242.238.90:<0.24203.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24204.0> [rebalance:debug,2014-08-19T16:50:37.774,ns_1@10.242.238.90:<0.24203.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:37.775,ns_1@10.242.238.90:<0.24203.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10171.1>,#Ref<16550.0.1.159960>}]} [rebalance:info,2014-08-19T16:50:37.775,ns_1@10.242.238.90:<0.24203.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 678 [rebalance:debug,2014-08-19T16:50:37.775,ns_1@10.242.238.90:<0.24203.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10171.1>,#Ref<16550.0.1.159960>}] [ns_server:debug,2014-08-19T16:50:37.776,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24205.0> (ok) [ns_server:debug,2014-08-19T16:50:37.776,ns_1@10.242.238.90:<0.24203.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:37.778,ns_1@10.242.238.90:<0.24206.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 678 [ns_server:info,2014-08-19T16:50:37.783,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 422 state to replica [ns_server:info,2014-08-19T16:50:37.787,ns_1@10.242.238.90:<0.24209.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 422 to state replica [ns_server:debug,2014-08-19T16:50:37.814,ns_1@10.242.238.90:<0.24209.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: 
replication_building_422_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:37.815,ns_1@10.242.238.90:<0.24209.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[422]}, {checkpoints,[{422,0}]}, {name,<<"replication_building_422_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[422]}, {takeover,false}, {suffix,"building_422_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",422,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:37.816,ns_1@10.242.238.90:<0.24209.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24210.0> [rebalance:debug,2014-08-19T16:50:37.816,ns_1@10.242.238.90:<0.24209.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:37.817,ns_1@10.242.238.90:<0.24209.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10207.1>,#Ref<16550.0.1.160127>}]} [rebalance:info,2014-08-19T16:50:37.817,ns_1@10.242.238.90:<0.24209.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 422 [rebalance:debug,2014-08-19T16:50:37.817,ns_1@10.242.238.90:<0.24209.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10207.1>,#Ref<16550.0.1.160127>}] [ns_server:debug,2014-08-19T16:50:37.818,ns_1@10.242.238.90:<0.24209.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:50:37.824,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/425. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:37.824,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",425,replica,0} [rebalance:debug,2014-08-19T16:50:37.833,ns_1@10.242.238.90:<0.24211.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 422 [ns_server:info,2014-08-19T16:50:37.894,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 677 state to replica [ns_server:info,2014-08-19T16:50:37.900,ns_1@10.242.238.90:<0.24228.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 677 to state replica [ns_server:debug,2014-08-19T16:50:37.940,ns_1@10.242.238.90:<0.24228.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_677_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:37.941,ns_1@10.242.238.90:<0.24228.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[677]}, {checkpoints,[{677,0}]}, {name,<<"replication_building_677_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[677]}, {takeover,false}, {suffix,"building_677_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",677,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:37.942,ns_1@10.242.238.90:<0.24228.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24229.0> [rebalance:debug,2014-08-19T16:50:37.942,ns_1@10.242.238.90:<0.24228.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:37.942,ns_1@10.242.238.90:<0.24228.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter 
{had_backfill,undefined,undefined,[{<16550.10278.1>,#Ref<16550.0.1.160620>}]} [rebalance:info,2014-08-19T16:50:37.943,ns_1@10.242.238.90:<0.24228.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 677 [rebalance:debug,2014-08-19T16:50:37.943,ns_1@10.242.238.90:<0.24228.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10278.1>,#Ref<16550.0.1.160620>}] [ns_server:debug,2014-08-19T16:50:37.944,ns_1@10.242.238.90:<0.24228.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:37.944,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24230.0> (ok) [rebalance:debug,2014-08-19T16:50:37.945,ns_1@10.242.238.90:<0.24231.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 677 [ns_server:info,2014-08-19T16:50:37.950,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 421 state to replica [ns_server:info,2014-08-19T16:50:37.954,ns_1@10.242.238.90:<0.24234.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 421 to state replica [ns_server:debug,2014-08-19T16:50:37.965,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 680. Nacking mccouch update. [views:debug,2014-08-19T16:50:37.965,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/680. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:37.966,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",680,pending,0} [ns_server:debug,2014-08-19T16:50:37.966,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,977,945,762,730,698,1009,996, 964,749,717,685,983,951,736,704,425,1015,970,938,755,723,691,1002,989,957, 742,710,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767,735, 703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728,696,1007, 994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000,987,955, 740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948,765,733, 701,1012,999,967,752,720,688,986,954,739,707,1018,973,941,758,726,694,1005, 992,960,745,713,681,979,947,764,732,700,1011,998,966,751,719,687,985,953,738, 706,1017,972,940,757,725,693,1004,991,959,744,712,680,1023,978,946,763,731, 699,1010,965,718,952,705,1016,939,756,692,1003,990,743] [ns_server:debug,2014-08-19T16:50:37.982,ns_1@10.242.238.90:<0.24234.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_421_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:37.983,ns_1@10.242.238.90:<0.24234.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[421]}, {checkpoints,[{421,0}]}, {name,<<"replication_building_421_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[421]}, {takeover,false}, {suffix,"building_421_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",421,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:37.984,ns_1@10.242.238.90:<0.24234.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24235.0> 
[rebalance:debug,2014-08-19T16:50:37.984,ns_1@10.242.238.90:<0.24234.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:37.985,ns_1@10.242.238.90:<0.24234.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10300.1>,#Ref<16550.0.1.160740>}]} [rebalance:info,2014-08-19T16:50:37.985,ns_1@10.242.238.90:<0.24234.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 421 [rebalance:debug,2014-08-19T16:50:37.985,ns_1@10.242.238.90:<0.24234.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10300.1>,#Ref<16550.0.1.160740>}] [ns_server:debug,2014-08-19T16:50:37.986,ns_1@10.242.238.90:<0.24234.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:37.999,ns_1@10.242.238.90:<0.24236.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 421 [views:debug,2014-08-19T16:50:38.049,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/680. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:38.049,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",680,pending,0} [ns_server:info,2014-08-19T16:50:38.058,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 676 state to replica [ns_server:info,2014-08-19T16:50:38.065,ns_1@10.242.238.90:<0.24239.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 676 to state replica [ns_server:debug,2014-08-19T16:50:38.109,ns_1@10.242.238.90:<0.24239.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_676_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:38.110,ns_1@10.242.238.90:<0.24239.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[676]}, {checkpoints,[{676,0}]}, {name,<<"replication_building_676_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[676]}, {takeover,false}, {suffix,"building_676_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",676,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:38.111,ns_1@10.242.238.90:<0.24239.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24254.0> [rebalance:debug,2014-08-19T16:50:38.111,ns_1@10.242.238.90:<0.24239.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:38.111,ns_1@10.242.238.90:<0.24239.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10355.1>,#Ref<16550.0.1.161003>}]} [rebalance:info,2014-08-19T16:50:38.112,ns_1@10.242.238.90:<0.24239.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 676 [rebalance:debug,2014-08-19T16:50:38.112,ns_1@10.242.238.90:<0.24239.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10355.1>,#Ref<16550.0.1.161003>}] [ns_server:debug,2014-08-19T16:50:38.112,ns_1@10.242.238.90:<0.24239.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:38.113,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24255.0> (ok) 
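The entries above and below repeat one cycle per vbucket (422, 677, 421, 676, ...): ns_memcached changes the vbucket state to replica, ebucketmigrator_srv kills any stale replication_building_<vb> tap and starts a fresh tap stream from 10.242.238.88:11209 to 10.242.238.90:11209, the backfill opens and closes, and a janitor_agent worker then waits for checkpoint 1 of that vbucket to persist. A minimal sketch of reconstructing that per-vbucket timeline from the single-line messages shown here, assuming the log text is saved locally under the hypothetical name ns_server.debug.log:

    import re
    from collections import defaultdict

    # Sketch only: the filename is an assumed local copy of this log.
    LOG_FILE = "ns_server.debug.log"

    # Single-line messages that carry a vbucket id, as seen in the entries above.
    STATE_RE = re.compile(r"Changed vbucket (\d+) state to (\w+)")
    TAP_RE = re.compile(r"killing tap named: replication_building_(\d+)_")
    WAIT_RE = re.compile(r"persistence of checkpoint \d+ in vbucket (\d+)")

    timeline = defaultdict(list)
    with open(LOG_FILE) as log:
        for line in log:
            m = STATE_RE.search(line)
            if m:
                timeline[int(m.group(1))].append("state->" + m.group(2))
            m = TAP_RE.search(line)
            if m:
                timeline[int(m.group(1))].append("old tap killed, stream restarted")
            m = WAIT_RE.search(line)
            if m:
                timeline[int(m.group(1))].append("waiting for checkpoint persistence")

    for vb in sorted(timeline):
        print(vb, " -> ".join(timeline[vb]))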
[rebalance:debug,2014-08-19T16:50:38.114,ns_1@10.242.238.90:<0.24256.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 676 [ns_server:info,2014-08-19T16:50:38.119,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 420 state to replica [ns_server:info,2014-08-19T16:50:38.125,ns_1@10.242.238.90:<0.24259.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 420 to state replica [ns_server:debug,2014-08-19T16:50:38.152,ns_1@10.242.238.90:<0.24259.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_420_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:38.154,ns_1@10.242.238.90:<0.24259.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[420]}, {checkpoints,[{420,0}]}, {name,<<"replication_building_420_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[420]}, {takeover,false}, {suffix,"building_420_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",420,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:38.155,ns_1@10.242.238.90:<0.24259.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24260.0> [rebalance:debug,2014-08-19T16:50:38.155,ns_1@10.242.238.90:<0.24259.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:38.155,ns_1@10.242.238.90:<0.24259.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10377.1>,#Ref<16550.0.1.161133>}]} [rebalance:info,2014-08-19T16:50:38.155,ns_1@10.242.238.90:<0.24259.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 420 [rebalance:debug,2014-08-19T16:50:38.156,ns_1@10.242.238.90:<0.24259.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10377.1>,#Ref<16550.0.1.161133>}] [ns_server:debug,2014-08-19T16:50:38.156,ns_1@10.242.238.90:<0.24259.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:38.177,ns_1@10.242.238.90:<0.24261.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 420 [ns_server:debug,2014-08-19T16:50:38.187,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 678. Nacking mccouch update. [views:debug,2014-08-19T16:50:38.187,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/678. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:38.187,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",678,pending,0} [ns_server:debug,2014-08-19T16:50:38.188,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,977,945,762,730,698,1009,996, 964,749,717,685,983,951,736,704,425,1015,970,938,755,723,691,1002,989,957, 742,710,678,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767, 735,703,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728,696, 1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000,987, 955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948,765, 733,701,1012,999,967,752,720,688,986,954,739,707,1018,973,941,758,726,694, 1005,992,960,745,713,681,979,947,764,732,700,1011,998,966,751,719,687,985, 953,738,706,1017,972,940,757,725,693,1004,991,959,744,712,680,1023,978,946, 763,731,699,1010,965,718,952,705,1016,939,756,692,1003,990,743] [views:debug,2014-08-19T16:50:38.229,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/678. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:38.229,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",678,pending,0} [ns_server:info,2014-08-19T16:50:38.239,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 675 state to replica [ns_server:info,2014-08-19T16:50:38.246,ns_1@10.242.238.90:<0.24264.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 675 to state replica [ns_server:debug,2014-08-19T16:50:38.287,ns_1@10.242.238.90:<0.24264.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_675_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:38.288,ns_1@10.242.238.90:<0.24264.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[675]}, {checkpoints,[{675,0}]}, {name,<<"replication_building_675_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[675]}, {takeover,false}, {suffix,"building_675_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",675,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:38.289,ns_1@10.242.238.90:<0.24264.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24279.0> [rebalance:debug,2014-08-19T16:50:38.289,ns_1@10.242.238.90:<0.24264.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:38.290,ns_1@10.242.238.90:<0.24264.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10436.1>,#Ref<16550.0.1.161432>}]} [rebalance:info,2014-08-19T16:50:38.290,ns_1@10.242.238.90:<0.24264.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 675 [rebalance:debug,2014-08-19T16:50:38.290,ns_1@10.242.238.90:<0.24264.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10436.1>,#Ref<16550.0.1.161432>}] [ns_server:debug,2014-08-19T16:50:38.291,ns_1@10.242.238.90:<0.24264.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:50:38.291,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24280.0> (ok) [rebalance:debug,2014-08-19T16:50:38.292,ns_1@10.242.238.90:<0.24281.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 675 [ns_server:info,2014-08-19T16:50:38.297,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 419 state to replica [ns_server:info,2014-08-19T16:50:38.301,ns_1@10.242.238.90:<0.24284.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 419 to state replica [ns_server:debug,2014-08-19T16:50:38.321,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 424. Nacking mccouch update. [views:debug,2014-08-19T16:50:38.321,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/424. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:38.321,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",424,replica,0} [ns_server:debug,2014-08-19T16:50:38.322,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,977,945,762,730,698,1009,996, 964,749,717,685,983,951,736,704,425,1015,970,938,755,723,691,1002,989,957, 742,710,678,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767, 735,703,424,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728, 696,1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000, 987,955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948, 765,733,701,1012,999,967,752,720,688,986,954,739,707,1018,973,941,758,726, 694,1005,992,960,745,713,681,979,947,764,732,700,1011,998,966,751,719,687, 985,953,738,706,1017,972,940,757,725,693,1004,991,959,744,712,680,1023,978, 946,763,731,699,1010,965,718,952,705,1016,939,756,692,1003,990,743] [ns_server:debug,2014-08-19T16:50:38.329,ns_1@10.242.238.90:<0.24284.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_419_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:38.330,ns_1@10.242.238.90:<0.24284.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[419]}, {checkpoints,[{419,0}]}, {name,<<"replication_building_419_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[419]}, {takeover,false}, {suffix,"building_419_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",419,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:38.331,ns_1@10.242.238.90:<0.24284.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24285.0> [rebalance:debug,2014-08-19T16:50:38.331,ns_1@10.242.238.90:<0.24284.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:38.332,ns_1@10.242.238.90:<0.24284.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10458.1>,#Ref<16550.0.1.161548>}]} [rebalance:info,2014-08-19T16:50:38.332,ns_1@10.242.238.90:<0.24284.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 419 
[rebalance:debug,2014-08-19T16:50:38.332,ns_1@10.242.238.90:<0.24284.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10458.1>,#Ref<16550.0.1.161548>}] [ns_server:debug,2014-08-19T16:50:38.333,ns_1@10.242.238.90:<0.24284.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:38.348,ns_1@10.242.238.90:<0.24294.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 419 [views:debug,2014-08-19T16:50:38.355,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/424. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:38.355,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",424,replica,0} [ns_server:info,2014-08-19T16:50:38.410,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 674 state to replica [ns_server:info,2014-08-19T16:50:38.418,ns_1@10.242.238.90:<0.24311.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 674 to state replica [ns_server:debug,2014-08-19T16:50:38.422,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 422. Nacking mccouch update. [views:debug,2014-08-19T16:50:38.422,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/422. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:38.422,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",422,replica,0} [ns_server:debug,2014-08-19T16:50:38.423,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,977,945,762,730,698,1009,996, 964,749,717,685,983,951,736,704,425,1015,970,938,755,723,691,1002,989,957, 742,710,678,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767, 735,703,424,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728, 696,1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000, 987,955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948, 765,733,701,422,1012,999,967,752,720,688,986,954,739,707,1018,973,941,758, 726,694,1005,992,960,745,713,681,979,947,764,732,700,1011,998,966,751,719, 687,985,953,738,706,1017,972,940,757,725,693,1004,991,959,744,712,680,1023, 978,946,763,731,699,1010,965,718,952,705,1016,939,756,692,1003,990,743] [ns_server:debug,2014-08-19T16:50:38.455,ns_1@10.242.238.90:<0.24311.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_674_'ns_1@10.242.238.90' [views:debug,2014-08-19T16:50:38.456,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/422. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:38.456,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",422,replica,0} [rebalance:debug,2014-08-19T16:50:38.457,ns_1@10.242.238.90:<0.24096.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:38.457,ns_1@10.242.238.90:<0.24135.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:38.457,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24096.0> (ok) [ns_server:debug,2014-08-19T16:50:38.457,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24135.0> (ok) [rebalance:info,2014-08-19T16:50:38.457,ns_1@10.242.238.90:<0.24311.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[674]}, {checkpoints,[{674,0}]}, {name,<<"replication_building_674_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[674]}, {takeover,false}, {suffix,"building_674_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",674,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:38.457,ns_1@10.242.238.90:<0.24311.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24314.0> [rebalance:debug,2014-08-19T16:50:38.458,ns_1@10.242.238.90:<0.24311.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:38.458,ns_1@10.242.238.90:<0.24311.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10514.1>,#Ref<16550.0.1.161858>}]} [rebalance:info,2014-08-19T16:50:38.458,ns_1@10.242.238.90:<0.24311.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 674 [rebalance:debug,2014-08-19T16:50:38.459,ns_1@10.242.238.90:<0.24311.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10514.1>,#Ref<16550.0.1.161858>}] [ns_server:debug,2014-08-19T16:50:38.459,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24315.0> (ok) [ns_server:debug,2014-08-19T16:50:38.459,ns_1@10.242.238.90:<0.24311.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:38.460,ns_1@10.242.238.90:<0.24316.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 682 [rebalance:debug,2014-08-19T16:50:38.461,ns_1@10.242.238.90:<0.24319.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 674 [ns_server:info,2014-08-19T16:50:38.466,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 418 state to replica [ns_server:info,2014-08-19T16:50:38.470,ns_1@10.242.238.90:<0.24322.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 418 to state replica [ns_server:debug,2014-08-19T16:50:38.497,ns_1@10.242.238.90:<0.24322.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_418_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:38.499,ns_1@10.242.238.90:<0.24322.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[418]}, {checkpoints,[{418,0}]}, {name,<<"replication_building_418_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, 
[{vbuckets,[418]}, {takeover,false}, {suffix,"building_418_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",418,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:38.499,ns_1@10.242.238.90:<0.24322.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24323.0> [rebalance:debug,2014-08-19T16:50:38.499,ns_1@10.242.238.90:<0.24322.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:38.500,ns_1@10.242.238.90:<0.24322.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10540.1>,#Ref<16550.0.1.162016>}]} [rebalance:info,2014-08-19T16:50:38.500,ns_1@10.242.238.90:<0.24322.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 418 [rebalance:debug,2014-08-19T16:50:38.500,ns_1@10.242.238.90:<0.24322.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10540.1>,#Ref<16550.0.1.162016>}] [ns_server:debug,2014-08-19T16:50:38.501,ns_1@10.242.238.90:<0.24322.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:38.520,ns_1@10.242.238.90:<0.24324.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 418 [ns_server:info,2014-08-19T16:50:38.580,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 673 state to replica [ns_server:info,2014-08-19T16:50:38.589,ns_1@10.242.238.90:<0.24341.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 673 to state replica [ns_server:debug,2014-08-19T16:50:38.598,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 679. Nacking mccouch update. [views:debug,2014-08-19T16:50:38.598,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/679. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:38.598,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",679,pending,0} [ns_server:debug,2014-08-19T16:50:38.599,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,977,945,762,730,698,1009,996, 964,749,717,685,983,951,736,704,425,1015,970,938,755,723,691,1002,989,957, 742,710,678,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767, 735,703,424,1014,969,754,722,690,1001,988,956,741,709,1020,975,943,760,728, 696,1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000, 987,955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948, 765,733,701,422,1012,999,967,752,720,688,986,954,739,707,1018,973,941,758, 726,694,1005,992,960,745,713,681,979,947,764,732,700,1011,998,966,751,719, 687,985,953,738,706,1017,972,940,757,725,693,1004,991,959,744,712,680,1023, 978,946,763,731,699,1010,965,718,952,705,1016,939,756,692,1003,990,743,679] [ns_server:debug,2014-08-19T16:50:38.628,ns_1@10.242.238.90:<0.24341.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_673_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:38.629,ns_1@10.242.238.90:<0.24341.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[673]}, {checkpoints,[{673,0}]}, {name,<<"replication_building_673_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[673]}, {takeover,false}, {suffix,"building_673_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",673,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:38.630,ns_1@10.242.238.90:<0.24341.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24342.0> [rebalance:debug,2014-08-19T16:50:38.630,ns_1@10.242.238.90:<0.24341.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:38.631,ns_1@10.242.238.90:<0.24341.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10581.1>,#Ref<16550.0.1.162230>}]} [rebalance:info,2014-08-19T16:50:38.631,ns_1@10.242.238.90:<0.24341.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 673 [rebalance:debug,2014-08-19T16:50:38.631,ns_1@10.242.238.90:<0.24341.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10581.1>,#Ref<16550.0.1.162230>}] [ns_server:debug,2014-08-19T16:50:38.632,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24343.0> (ok) [ns_server:debug,2014-08-19T16:50:38.632,ns_1@10.242.238.90:<0.24341.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:38.633,ns_1@10.242.238.90:<0.24344.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 673 [ns_server:info,2014-08-19T16:50:38.638,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 417 state to replica [ns_server:info,2014-08-19T16:50:38.642,ns_1@10.242.238.90:<0.24347.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 417 to state replica 
[views:debug,2014-08-19T16:50:38.649,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/679. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:38.649,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",679,pending,0} [ns_server:debug,2014-08-19T16:50:38.669,ns_1@10.242.238.90:<0.24347.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_417_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:38.670,ns_1@10.242.238.90:<0.24347.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[417]}, {checkpoints,[{417,0}]}, {name,<<"replication_building_417_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[417]}, {takeover,false}, {suffix,"building_417_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",417,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:38.671,ns_1@10.242.238.90:<0.24347.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24348.0> [rebalance:debug,2014-08-19T16:50:38.671,ns_1@10.242.238.90:<0.24347.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:38.672,ns_1@10.242.238.90:<0.24347.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10617.1>,#Ref<16550.0.1.162374>}]} [rebalance:info,2014-08-19T16:50:38.672,ns_1@10.242.238.90:<0.24347.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 417 [rebalance:debug,2014-08-19T16:50:38.672,ns_1@10.242.238.90:<0.24347.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10617.1>,#Ref<16550.0.1.162374>}] [ns_server:debug,2014-08-19T16:50:38.673,ns_1@10.242.238.90:<0.24347.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:38.688,ns_1@10.242.238.90:<0.24363.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 417 [ns_server:debug,2014-08-19T16:50:38.741,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 677. Nacking mccouch update. [views:debug,2014-08-19T16:50:38.741,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/677. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:38.741,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",677,pending,0} [ns_server:debug,2014-08-19T16:50:38.741,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,945,762,698,1009,996,964,749, 717,685,983,951,736,704,425,1015,970,938,755,723,691,1002,989,957,742,710, 678,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767,735,703, 424,1014,969,754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696, 1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000,987, 955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948,765, 733,701,422,1012,999,967,752,720,688,986,954,739,707,1018,973,941,758,726, 694,1005,992,960,745,713,681,979,947,764,732,700,1011,998,966,751,719,687, 985,953,738,706,1017,972,940,757,725,693,1004,991,959,744,712,680,1023,978, 946,763,731,699,1010,965,718,952,705,1016,939,756,692,1003,990,743,679,977, 730] [ns_server:info,2014-08-19T16:50:38.748,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 672 state to replica [ns_server:info,2014-08-19T16:50:38.756,ns_1@10.242.238.90:<0.24366.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 672 to state replica [ns_server:debug,2014-08-19T16:50:38.794,ns_1@10.242.238.90:<0.24366.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_672_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:38.795,ns_1@10.242.238.90:<0.24366.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[672]}, {checkpoints,[{672,0}]}, {name,<<"replication_building_672_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[672]}, {takeover,false}, {suffix,"building_672_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",672,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:38.796,ns_1@10.242.238.90:<0.24366.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24367.0> [rebalance:debug,2014-08-19T16:50:38.796,ns_1@10.242.238.90:<0.24366.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:38.797,ns_1@10.242.238.90:<0.24366.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10658.1>,#Ref<16550.0.1.162585>}]} [rebalance:info,2014-08-19T16:50:38.797,ns_1@10.242.238.90:<0.24366.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 672 [rebalance:debug,2014-08-19T16:50:38.797,ns_1@10.242.238.90:<0.24366.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10658.1>,#Ref<16550.0.1.162585>}] [ns_server:debug,2014-08-19T16:50:38.798,ns_1@10.242.238.90:<0.24366.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:38.798,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24368.0> (ok) [rebalance:debug,2014-08-19T16:50:38.800,ns_1@10.242.238.90:<0.24369.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 672 
[ns_server:info,2014-08-19T16:50:38.805,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 416 state to replica [views:debug,2014-08-19T16:50:38.808,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/677. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:38.808,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",677,pending,0} [ns_server:info,2014-08-19T16:50:38.808,ns_1@10.242.238.90:<0.24372.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 416 to state replica [ns_server:debug,2014-08-19T16:50:38.835,ns_1@10.242.238.90:<0.24372.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_416_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:38.837,ns_1@10.242.238.90:<0.24372.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[416]}, {checkpoints,[{416,0}]}, {name,<<"replication_building_416_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[416]}, {takeover,false}, {suffix,"building_416_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",416,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:38.837,ns_1@10.242.238.90:<0.24372.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24373.0> [rebalance:debug,2014-08-19T16:50:38.838,ns_1@10.242.238.90:<0.24372.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:38.838,ns_1@10.242.238.90:<0.24372.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10680.1>,#Ref<16550.0.1.162706>}]} [rebalance:info,2014-08-19T16:50:38.838,ns_1@10.242.238.90:<0.24372.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 416 [rebalance:debug,2014-08-19T16:50:38.839,ns_1@10.242.238.90:<0.24372.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10680.1>,#Ref<16550.0.1.162706>}] [ns_server:debug,2014-08-19T16:50:38.839,ns_1@10.242.238.90:<0.24372.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:38.855,ns_1@10.242.238.90:<0.24388.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 416 [ns_server:debug,2014-08-19T16:50:38.875,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 675. Nacking mccouch update. [views:debug,2014-08-19T16:50:38.875,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/675. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:38.875,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",675,pending,0} [ns_server:debug,2014-08-19T16:50:38.876,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,945,762,698,1009,996,964,749, 717,685,983,951,736,704,425,1015,970,938,755,723,691,1002,989,957,742,710, 678,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767,735,703, 424,1014,969,754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696, 1007,994,962,747,715,683,981,949,766,734,702,1013,968,753,721,689,1000,987, 955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948,765, 733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941,758, 726,694,1005,992,960,745,713,681,979,947,764,732,700,1011,998,966,751,719, 687,985,953,738,706,1017,972,940,757,725,693,1004,991,959,744,712,680,1023, 978,946,763,731,699,1010,965,718,952,705,1016,939,756,692,1003,990,743,679, 977,730] [views:debug,2014-08-19T16:50:38.909,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/675. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:38.909,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",675,pending,0} [ns_server:info,2014-08-19T16:50:38.916,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 671 state to replica [ns_server:info,2014-08-19T16:50:38.922,ns_1@10.242.238.90:<0.24391.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 671 to state replica [ns_server:debug,2014-08-19T16:50:38.961,ns_1@10.242.238.90:<0.24391.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_671_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:38.962,ns_1@10.242.238.90:<0.24391.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[671]}, {checkpoints,[{671,0}]}, {name,<<"replication_building_671_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[671]}, {takeover,false}, {suffix,"building_671_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",671,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:38.963,ns_1@10.242.238.90:<0.24391.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24406.0> [rebalance:debug,2014-08-19T16:50:38.963,ns_1@10.242.238.90:<0.24391.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:38.964,ns_1@10.242.238.90:<0.24391.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10735.1>,#Ref<16550.0.1.162945>}]} [rebalance:info,2014-08-19T16:50:38.964,ns_1@10.242.238.90:<0.24391.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 671 [rebalance:debug,2014-08-19T16:50:38.964,ns_1@10.242.238.90:<0.24391.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10735.1>,#Ref<16550.0.1.162945>}] [ns_server:debug,2014-08-19T16:50:38.965,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24407.0> (ok) 
[ns_server:debug,2014-08-19T16:50:38.965,ns_1@10.242.238.90:<0.24391.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:38.966,ns_1@10.242.238.90:<0.24408.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 671 [ns_server:info,2014-08-19T16:50:38.971,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 415 state to replica [ns_server:info,2014-08-19T16:50:38.975,ns_1@10.242.238.90:<0.24411.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 415 to state replica [ns_server:debug,2014-08-19T16:50:38.976,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 423. Nacking mccouch update. [views:debug,2014-08-19T16:50:38.976,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/423. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:38.976,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",423,replica,0} [ns_server:debug,2014-08-19T16:50:38.976,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,945,762,698,1009,996,964,749, 717,685,983,951,736,704,425,1015,970,938,755,723,691,1002,989,957,742,710, 678,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767,735,703, 424,1014,969,754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696, 1007,994,962,747,715,683,981,949,766,734,702,423,1013,968,753,721,689,1000, 987,955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948, 765,733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941, 758,726,694,1005,992,960,745,713,681,979,947,764,732,700,1011,998,966,751, 719,687,985,953,738,706,1017,972,940,757,725,693,1004,991,959,744,712,680, 1023,978,946,763,731,699,1010,965,718,952,705,1016,939,756,692,1003,990,743, 679,977,730] [ns_server:debug,2014-08-19T16:50:39.002,ns_1@10.242.238.90:<0.24411.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_415_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:39.003,ns_1@10.242.238.90:<0.24411.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[415]}, {checkpoints,[{415,0}]}, {name,<<"replication_building_415_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[415]}, {takeover,false}, {suffix,"building_415_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",415,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:39.004,ns_1@10.242.238.90:<0.24411.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24412.0> [rebalance:debug,2014-08-19T16:50:39.004,ns_1@10.242.238.90:<0.24411.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:39.004,ns_1@10.242.238.90:<0.24411.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10757.1>,#Ref<16550.0.1.163061>}]} [rebalance:info,2014-08-19T16:50:39.004,ns_1@10.242.238.90:<0.24411.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 415 
[rebalance:debug,2014-08-19T16:50:39.005,ns_1@10.242.238.90:<0.24411.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10757.1>,#Ref<16550.0.1.163061>}] [ns_server:debug,2014-08-19T16:50:39.005,ns_1@10.242.238.90:<0.24411.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:50:39.011,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/423. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:39.011,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",423,replica,0} [rebalance:debug,2014-08-19T16:50:39.023,ns_1@10.242.238.90:<0.24413.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 415 [ns_server:info,2014-08-19T16:50:39.085,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 670 state to replica [ns_server:info,2014-08-19T16:50:39.091,ns_1@10.242.238.90:<0.24430.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 670 to state replica [ns_server:debug,2014-08-19T16:50:39.095,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 421. Nacking mccouch update. [views:debug,2014-08-19T16:50:39.095,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/421. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:39.095,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",421,replica,0} [ns_server:debug,2014-08-19T16:50:39.095,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,945,762,698,1009,996,964,749, 717,685,983,951,736,704,425,1015,970,938,755,723,691,1002,989,957,742,710, 678,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767,735,703, 424,1014,969,754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696, 1007,994,962,747,715,683,981,949,766,734,702,423,1013,968,753,721,689,1000, 987,955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948, 765,733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941, 758,726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998,966, 751,719,687,985,953,738,706,1017,972,940,757,725,693,1004,991,959,744,712, 680,1023,978,946,763,731,699,1010,965,718,952,705,1016,939,756,692,1003,990, 743,679,977,730] [ns_server:debug,2014-08-19T16:50:39.131,ns_1@10.242.238.90:<0.24430.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_670_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:39.133,ns_1@10.242.238.90:<0.24430.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[670]}, {checkpoints,[{670,0}]}, {name,<<"replication_building_670_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[670]}, {takeover,false}, {suffix,"building_670_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",670,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:39.133,ns_1@10.242.238.90:<0.24430.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24431.0> 
[rebalance:debug,2014-08-19T16:50:39.133,ns_1@10.242.238.90:<0.24430.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:39.134,ns_1@10.242.238.90:<0.24430.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10817.1>,#Ref<16550.0.1.163347>}]} [rebalance:info,2014-08-19T16:50:39.134,ns_1@10.242.238.90:<0.24430.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 670 [rebalance:debug,2014-08-19T16:50:39.134,ns_1@10.242.238.90:<0.24430.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10817.1>,#Ref<16550.0.1.163347>}] [ns_server:debug,2014-08-19T16:50:39.135,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24432.0> (ok) [ns_server:debug,2014-08-19T16:50:39.135,ns_1@10.242.238.90:<0.24430.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:39.136,ns_1@10.242.238.90:<0.24433.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 670 [ns_server:info,2014-08-19T16:50:39.141,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 414 state to replica [ns_server:info,2014-08-19T16:50:39.145,ns_1@10.242.238.90:<0.24436.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 414 to state replica [views:debug,2014-08-19T16:50:39.145,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/421. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:39.146,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",421,replica,0} [ns_server:debug,2014-08-19T16:50:39.173,ns_1@10.242.238.90:<0.24436.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_414_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:39.174,ns_1@10.242.238.90:<0.24436.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[414]}, {checkpoints,[{414,0}]}, {name,<<"replication_building_414_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[414]}, {takeover,false}, {suffix,"building_414_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",414,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:39.175,ns_1@10.242.238.90:<0.24436.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24437.0> [rebalance:debug,2014-08-19T16:50:39.175,ns_1@10.242.238.90:<0.24436.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:39.175,ns_1@10.242.238.90:<0.24436.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10839.1>,#Ref<16550.0.1.163445>}]} [rebalance:info,2014-08-19T16:50:39.176,ns_1@10.242.238.90:<0.24436.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 414 [rebalance:debug,2014-08-19T16:50:39.176,ns_1@10.242.238.90:<0.24436.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10839.1>,#Ref<16550.0.1.163445>}] [ns_server:debug,2014-08-19T16:50:39.177,ns_1@10.242.238.90:<0.24436.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
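Interleaved with the replica building, capi_set_view_manager reprints its full "Usable vbuckets:" list; each snapshot differs from the previous one only by the vbuckets whose _local/vbuuid documents were just written (678, 424, 422, 679, 677, 675, 423, 421, ... in this stretch). A small sketch, under the same assumed ns_server.debug.log filename, that diffs consecutive snapshots so those additions stand out:

    import re

    # Sketch only: "ns_server.debug.log" is an assumed local copy of this log.
    SNAP_RE = re.compile(r"Usable vbuckets:\s*\[([\d,\s]+)\]")

    with open("ns_server.debug.log") as log:
        text = log.read()

    prev = None
    for m in SNAP_RE.finditer(text):
        snapshot = {int(v) for v in re.split(r"[,\s]+", m.group(1).strip()) if v}
        if prev is not None:
            print("added:", sorted(snapshot - prev), "removed:", sorted(prev - snapshot))
        prev = snapshot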
[rebalance:debug,2014-08-19T16:50:39.192,ns_1@10.242.238.90:<0.24452.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 414 [ns_server:debug,2014-08-19T16:50:39.212,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 419. Nacking mccouch update. [views:debug,2014-08-19T16:50:39.212,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/419. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:39.212,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",419,replica,0} [ns_server:debug,2014-08-19T16:50:39.213,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,945,762,698,1009,996,964,749, 717,685,983,951,736,704,425,1015,970,938,755,723,691,1002,989,957,742,710, 678,1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767,735,703, 424,1014,969,754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696, 1007,994,962,747,715,683,981,949,766,734,702,423,1013,968,753,721,689,1000, 987,955,740,708,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948, 765,733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941, 758,726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998,966, 751,719,687,985,953,738,706,1017,972,940,757,725,693,1004,991,959,744,712, 680,1023,978,946,763,731,699,1010,965,718,952,705,1016,939,756,692,1003,990, 743,679,977,730,419] [ns_server:info,2014-08-19T16:50:39.253,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 669 state to replica [views:debug,2014-08-19T16:50:39.258,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/419. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:39.258,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",419,replica,0} [ns_server:info,2014-08-19T16:50:39.261,ns_1@10.242.238.90:<0.24455.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 669 to state replica [ns_server:debug,2014-08-19T16:50:39.299,ns_1@10.242.238.90:<0.24455.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_669_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:39.300,ns_1@10.242.238.90:<0.24455.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[669]}, {checkpoints,[{669,0}]}, {name,<<"replication_building_669_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[669]}, {takeover,false}, {suffix,"building_669_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",669,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:39.300,ns_1@10.242.238.90:<0.24455.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24456.0> [rebalance:debug,2014-08-19T16:50:39.301,ns_1@10.242.238.90:<0.24455.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:39.301,ns_1@10.242.238.90:<0.24455.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10880.1>,#Ref<16550.0.1.163676>}]} [rebalance:info,2014-08-19T16:50:39.301,ns_1@10.242.238.90:<0.24455.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 669 [rebalance:debug,2014-08-19T16:50:39.302,ns_1@10.242.238.90:<0.24455.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10880.1>,#Ref<16550.0.1.163676>}] [ns_server:debug,2014-08-19T16:50:39.302,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24457.0> (ok) [ns_server:debug,2014-08-19T16:50:39.302,ns_1@10.242.238.90:<0.24455.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:39.304,ns_1@10.242.238.90:<0.24458.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 669 [ns_server:info,2014-08-19T16:50:39.309,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 413 state to replica [ns_server:info,2014-08-19T16:50:39.312,ns_1@10.242.238.90:<0.24461.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 413 to state replica [ns_server:debug,2014-08-19T16:50:39.340,ns_1@10.242.238.90:<0.24461.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_413_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:39.342,ns_1@10.242.238.90:<0.24461.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[413]}, {checkpoints,[{413,0}]}, {name,<<"replication_building_413_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[413]}, {takeover,false}, {suffix,"building_413_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",413,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:39.343,ns_1@10.242.238.90:<0.24461.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24462.0> 
[rebalance:debug,2014-08-19T16:50:39.343,ns_1@10.242.238.90:<0.24461.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:39.344,ns_1@10.242.238.90:<0.24461.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10902.1>,#Ref<16550.0.1.163775>}]} [rebalance:info,2014-08-19T16:50:39.344,ns_1@10.242.238.90:<0.24461.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 413 [rebalance:debug,2014-08-19T16:50:39.344,ns_1@10.242.238.90:<0.24461.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10902.1>,#Ref<16550.0.1.163775>}] [ns_server:debug,2014-08-19T16:50:39.345,ns_1@10.242.238.90:<0.24461.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:39.359,ns_1@10.242.238.90:<0.24463.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 413 [ns_server:info,2014-08-19T16:50:39.418,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 668 state to replica [ns_server:info,2014-08-19T16:50:39.425,ns_1@10.242.238.90:<0.24486.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 668 to state replica [ns_server:debug,2014-08-19T16:50:39.463,ns_1@10.242.238.90:<0.24486.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_668_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:39.465,ns_1@10.242.238.90:<0.24486.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[668]}, {checkpoints,[{668,0}]}, {name,<<"replication_building_668_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[668]}, {takeover,false}, {suffix,"building_668_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",668,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:39.465,ns_1@10.242.238.90:<0.24486.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24488.0> [rebalance:debug,2014-08-19T16:50:39.465,ns_1@10.242.238.90:<0.24486.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:39.466,ns_1@10.242.238.90:<0.24486.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10957.1>,#Ref<16550.0.1.164033>}]} [rebalance:info,2014-08-19T16:50:39.466,ns_1@10.242.238.90:<0.24486.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 668 [rebalance:debug,2014-08-19T16:50:39.466,ns_1@10.242.238.90:<0.24486.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10957.1>,#Ref<16550.0.1.164033>}] [ns_server:debug,2014-08-19T16:50:39.467,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24489.0> (ok) [ns_server:debug,2014-08-19T16:50:39.467,ns_1@10.242.238.90:<0.24486.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:39.468,ns_1@10.242.238.90:<0.24490.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 668 [ns_server:info,2014-08-19T16:50:39.473,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 412 state to replica 
[ns_server:debug,2014-08-19T16:50:39.475,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 676. Nacking mccouch update. [views:debug,2014-08-19T16:50:39.475,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/676. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:39.475,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",676,pending,0} [ns_server:debug,2014-08-19T16:50:39.475,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,945,762,698,1009,996,749,685, 983,951,736,704,425,1015,970,938,755,723,691,1002,989,957,742,710,678,1021, 976,944,761,729,697,1008,995,963,748,716,684,982,950,767,735,703,424,1014, 969,754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696,1007,994, 962,747,715,683,981,949,766,734,702,423,1013,968,753,721,689,1000,987,955, 740,708,676,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948,765, 733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941,758, 726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998,966,751, 719,687,985,953,738,706,1017,972,940,757,725,693,1004,991,959,744,712,680, 1023,978,946,763,731,699,1010,965,718,952,705,1016,939,756,692,1003,990,743, 679,977,730,419,964,717] [ns_server:info,2014-08-19T16:50:39.477,ns_1@10.242.238.90:<0.24493.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 412 to state replica [ns_server:debug,2014-08-19T16:50:39.505,ns_1@10.242.238.90:<0.24493.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_412_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:39.507,ns_1@10.242.238.90:<0.24493.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[412]}, {checkpoints,[{412,0}]}, {name,<<"replication_building_412_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[412]}, {takeover,false}, {suffix,"building_412_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",412,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:39.508,ns_1@10.242.238.90:<0.24493.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24494.0> [rebalance:debug,2014-08-19T16:50:39.508,ns_1@10.242.238.90:<0.24493.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:39.509,ns_1@10.242.238.90:<0.24493.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.10987.1>,#Ref<16550.0.1.164185>}]} [rebalance:info,2014-08-19T16:50:39.509,ns_1@10.242.238.90:<0.24493.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 412 [rebalance:debug,2014-08-19T16:50:39.509,ns_1@10.242.238.90:<0.24493.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.10987.1>,#Ref<16550.0.1.164185>}] [ns_server:debug,2014-08-19T16:50:39.510,ns_1@10.242.238.90:<0.24493.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:39.523,ns_1@10.242.238.90:<0.24495.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 412 
[views:debug,2014-08-19T16:50:39.559,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/676. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:39.559,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",676,pending,0} [ns_server:info,2014-08-19T16:50:39.585,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 667 state to replica [ns_server:info,2014-08-19T16:50:39.591,ns_1@10.242.238.90:<0.24498.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 667 to state replica [ns_server:debug,2014-08-19T16:50:39.629,ns_1@10.242.238.90:<0.24498.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_667_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:39.631,ns_1@10.242.238.90:<0.24498.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[667]}, {checkpoints,[{667,0}]}, {name,<<"replication_building_667_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[667]}, {takeover,false}, {suffix,"building_667_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",667,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:39.631,ns_1@10.242.238.90:<0.24498.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24513.0> [rebalance:debug,2014-08-19T16:50:39.631,ns_1@10.242.238.90:<0.24498.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:39.632,ns_1@10.242.238.90:<0.24498.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.11034.1>,#Ref<16550.0.1.164440>}]} [rebalance:info,2014-08-19T16:50:39.632,ns_1@10.242.238.90:<0.24498.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 667 [rebalance:debug,2014-08-19T16:50:39.633,ns_1@10.242.238.90:<0.24498.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.11034.1>,#Ref<16550.0.1.164440>}] [ns_server:debug,2014-08-19T16:50:39.633,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24514.0> (ok) [ns_server:debug,2014-08-19T16:50:39.634,ns_1@10.242.238.90:<0.24498.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:39.635,ns_1@10.242.238.90:<0.24515.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 667 [ns_server:info,2014-08-19T16:50:39.640,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 411 state to replica [ns_server:info,2014-08-19T16:50:39.644,ns_1@10.242.238.90:<0.24518.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 411 to state replica [ns_server:debug,2014-08-19T16:50:39.650,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 674. Nacking mccouch update. [views:debug,2014-08-19T16:50:39.650,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/674. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:39.651,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",674,pending,0} [ns_server:debug,2014-08-19T16:50:39.651,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,945,762,698,1009,996,749,685, 983,951,736,704,425,1015,970,938,755,723,691,1002,989,957,742,710,678,1021, 976,944,761,729,697,1008,995,963,748,716,684,982,950,767,735,703,424,1014, 969,754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696,1007,994, 962,747,715,683,981,949,766,734,702,423,1013,968,753,721,689,1000,987,955, 740,708,676,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948,765, 733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941,758, 726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998,966,751, 719,687,985,953,738,706,674,1017,972,940,757,725,693,1004,991,959,744,712, 680,1023,978,946,763,731,699,1010,965,718,952,705,1016,939,756,692,1003,990, 743,679,977,730,419,964,717] [ns_server:debug,2014-08-19T16:50:39.671,ns_1@10.242.238.90:<0.24518.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_411_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:39.673,ns_1@10.242.238.90:<0.24518.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[411]}, {checkpoints,[{411,0}]}, {name,<<"replication_building_411_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[411]}, {takeover,false}, {suffix,"building_411_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",411,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:39.674,ns_1@10.242.238.90:<0.24518.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24519.0> [rebalance:debug,2014-08-19T16:50:39.674,ns_1@10.242.238.90:<0.24518.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:39.674,ns_1@10.242.238.90:<0.24518.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.11074.1>,#Ref<16550.0.1.164626>}]} [rebalance:info,2014-08-19T16:50:39.675,ns_1@10.242.238.90:<0.24518.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 411 [rebalance:debug,2014-08-19T16:50:39.675,ns_1@10.242.238.90:<0.24518.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.11074.1>,#Ref<16550.0.1.164626>}] [ns_server:debug,2014-08-19T16:50:39.676,ns_1@10.242.238.90:<0.24518.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:39.690,ns_1@10.242.238.90:<0.24520.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 411 [views:debug,2014-08-19T16:50:39.702,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/674. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:39.702,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",674,pending,0} [ns_server:info,2014-08-19T16:50:39.750,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 666 state to replica [ns_server:info,2014-08-19T16:50:39.757,ns_1@10.242.238.90:<0.24523.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 666 to state replica [ns_server:debug,2014-08-19T16:50:39.795,ns_1@10.242.238.90:<0.24523.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_666_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:39.797,ns_1@10.242.238.90:<0.24523.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[666]}, {checkpoints,[{666,0}]}, {name,<<"replication_building_666_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[666]}, {takeover,false}, {suffix,"building_666_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",666,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:39.797,ns_1@10.242.238.90:<0.24523.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24538.0> [rebalance:debug,2014-08-19T16:50:39.798,ns_1@10.242.238.90:<0.24523.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:39.798,ns_1@10.242.238.90:<0.24523.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.11129.1>,#Ref<16550.0.1.164912>}]} [rebalance:info,2014-08-19T16:50:39.798,ns_1@10.242.238.90:<0.24523.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 666 [rebalance:debug,2014-08-19T16:50:39.799,ns_1@10.242.238.90:<0.24523.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.11129.1>,#Ref<16550.0.1.164912>}] [ns_server:debug,2014-08-19T16:50:39.799,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24539.0> (ok) [ns_server:debug,2014-08-19T16:50:39.800,ns_1@10.242.238.90:<0.24523.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:39.801,ns_1@10.242.238.90:<0.24540.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 666 [ns_server:info,2014-08-19T16:50:39.806,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 410 state to replica [ns_server:info,2014-08-19T16:50:39.810,ns_1@10.242.238.90:<0.24543.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 410 to state replica [ns_server:debug,2014-08-19T16:50:39.837,ns_1@10.242.238.90:<0.24543.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_410_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:39.838,ns_1@10.242.238.90:<0.24543.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[410]}, {checkpoints,[{410,0}]}, {name,<<"replication_building_410_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[410]}, {takeover,false}, {suffix,"building_410_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",410,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, 
{set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:39.839,ns_1@10.242.238.90:<0.24543.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24544.0> [rebalance:debug,2014-08-19T16:50:39.839,ns_1@10.242.238.90:<0.24543.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:39.839,ns_1@10.242.238.90:<0.24543.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.11151.1>,#Ref<16550.0.1.165053>}]} [rebalance:info,2014-08-19T16:50:39.839,ns_1@10.242.238.90:<0.24543.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 410 [rebalance:debug,2014-08-19T16:50:39.840,ns_1@10.242.238.90:<0.24543.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.11151.1>,#Ref<16550.0.1.165053>}] [ns_server:debug,2014-08-19T16:50:39.841,ns_1@10.242.238.90:<0.24543.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:39.843,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 672. Nacking mccouch update. [views:debug,2014-08-19T16:50:39.843,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/672. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:39.843,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",672,pending,0} [ns_server:debug,2014-08-19T16:50:39.844,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,945,762,698,1009,996,749,685, 983,951,736,704,672,425,1015,970,938,755,723,691,1002,989,957,742,710,678, 1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767,735,703,424, 1014,969,754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696,1007, 994,962,747,715,683,981,949,766,734,702,423,1013,968,753,721,689,1000,987, 955,740,708,676,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948, 765,733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941, 758,726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998,966, 751,719,687,985,953,738,706,674,1017,972,940,757,725,693,1004,991,959,744, 712,680,1023,978,946,763,731,699,1010,965,718,952,705,1016,939,756,692,1003, 990,743,679,977,730,419,964,717] [rebalance:debug,2014-08-19T16:50:39.860,ns_1@10.242.238.90:<0.24545.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 410 [views:debug,2014-08-19T16:50:39.911,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/672. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:39.911,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",672,pending,0} [ns_server:info,2014-08-19T16:50:39.926,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 665 state to replica [ns_server:info,2014-08-19T16:50:39.933,ns_1@10.242.238.90:<0.24548.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 665 to state replica [ns_server:debug,2014-08-19T16:50:39.973,ns_1@10.242.238.90:<0.24548.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_665_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:39.975,ns_1@10.242.238.90:<0.24548.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[665]}, {checkpoints,[{665,0}]}, {name,<<"replication_building_665_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[665]}, {takeover,false}, {suffix,"building_665_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",665,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:39.976,ns_1@10.242.238.90:<0.24548.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24563.0> [rebalance:debug,2014-08-19T16:50:39.976,ns_1@10.242.238.90:<0.24548.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:39.976,ns_1@10.242.238.90:<0.24548.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.11206.1>,#Ref<16550.0.1.165315>}]} [rebalance:info,2014-08-19T16:50:39.977,ns_1@10.242.238.90:<0.24548.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 665 [rebalance:debug,2014-08-19T16:50:39.977,ns_1@10.242.238.90:<0.24548.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.11206.1>,#Ref<16550.0.1.165315>}] [ns_server:debug,2014-08-19T16:50:39.977,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24564.0> (ok) [ns_server:debug,2014-08-19T16:50:39.978,ns_1@10.242.238.90:<0.24548.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:39.979,ns_1@10.242.238.90:<0.24565.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 665 [ns_server:info,2014-08-19T16:50:39.984,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 409 state to replica [ns_server:debug,2014-08-19T16:50:39.994,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 670. Nacking mccouch update. [views:debug,2014-08-19T16:50:39.994,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/670. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:39.994,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",670,pending,0} [ns_server:debug,2014-08-19T16:50:39.994,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,945,762,698,1009,996,749,685, 983,951,736,704,672,425,1015,970,938,755,723,691,1002,989,957,742,710,678, 1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767,735,703,424, 1014,969,754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696,1007, 994,962,747,715,683,981,949,766,734,702,670,423,1013,968,753,721,689,1000, 987,955,740,708,676,1019,974,942,759,727,695,1006,993,961,746,714,682,980, 948,765,733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973, 941,758,726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998, 966,751,719,687,985,953,738,706,674,1017,972,940,757,725,693,1004,991,959, 744,712,680,1023,978,946,763,731,699,1010,965,718,952,705,1016,939,756,692, 1003,990,743,679,977,730,419,964,717] [views:debug,2014-08-19T16:50:40.062,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/670. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:40.062,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",670,pending,0} [ns_server:info,2014-08-19T16:50:40.066,ns_1@10.242.238.90:<0.24568.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 409 to state replica [ns_server:debug,2014-08-19T16:50:40.093,ns_1@10.242.238.90:<0.24568.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_409_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:40.095,ns_1@10.242.238.90:<0.24568.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[409]}, {checkpoints,[{409,0}]}, {name,<<"replication_building_409_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[409]}, {takeover,false}, {suffix,"building_409_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",409,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:40.096,ns_1@10.242.238.90:<0.24568.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24569.0> [rebalance:debug,2014-08-19T16:50:40.096,ns_1@10.242.238.90:<0.24568.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:40.096,ns_1@10.242.238.90:<0.24568.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.11228.1>,#Ref<16550.0.1.165433>}]} [rebalance:info,2014-08-19T16:50:40.097,ns_1@10.242.238.90:<0.24568.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 409 [rebalance:debug,2014-08-19T16:50:40.097,ns_1@10.242.238.90:<0.24568.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.11228.1>,#Ref<16550.0.1.165433>}] [ns_server:debug,2014-08-19T16:50:40.098,ns_1@10.242.238.90:<0.24568.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:40.116,ns_1@10.242.238.90:<0.24584.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 409 
[ns_server:debug,2014-08-19T16:50:40.155,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 420. Nacking mccouch update. [views:debug,2014-08-19T16:50:40.155,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/420. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:40.155,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",420,replica,0} [ns_server:debug,2014-08-19T16:50:40.155,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,945,762,698,1009,996,749,685, 983,951,736,704,672,425,1015,970,938,755,723,691,1002,989,957,742,710,678, 1021,976,944,761,729,697,1008,995,963,748,716,684,982,950,767,735,703,424, 1014,969,754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696,1007, 994,962,747,715,683,981,949,766,734,702,670,423,1013,968,753,721,689,1000, 987,955,740,708,676,1019,974,942,759,727,695,1006,993,961,746,714,682,980, 948,765,733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973, 941,758,726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998, 966,751,719,687,985,953,738,706,674,1017,972,940,757,725,693,1004,991,959, 744,712,680,1023,978,946,763,731,699,420,1010,965,718,952,705,1016,939,756, 692,1003,990,743,679,977,730,419,964,717] [ns_server:info,2014-08-19T16:50:40.178,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 664 state to replica [ns_server:info,2014-08-19T16:50:40.185,ns_1@10.242.238.90:<0.24587.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 664 to state replica [views:debug,2014-08-19T16:50:40.222,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/420. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:40.222,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",420,replica,0} [ns_server:debug,2014-08-19T16:50:40.224,ns_1@10.242.238.90:<0.24587.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_664_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:40.226,ns_1@10.242.238.90:<0.24587.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[664]}, {checkpoints,[{664,0}]}, {name,<<"replication_building_664_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[664]}, {takeover,false}, {suffix,"building_664_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",664,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:40.226,ns_1@10.242.238.90:<0.24587.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24588.0> [rebalance:debug,2014-08-19T16:50:40.227,ns_1@10.242.238.90:<0.24587.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:40.227,ns_1@10.242.238.90:<0.24587.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.11283.1>,#Ref<16550.0.1.165709>}]} [rebalance:info,2014-08-19T16:50:40.227,ns_1@10.242.238.90:<0.24587.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 664 [rebalance:debug,2014-08-19T16:50:40.228,ns_1@10.242.238.90:<0.24587.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.11283.1>,#Ref<16550.0.1.165709>}] [ns_server:debug,2014-08-19T16:50:40.228,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24589.0> (ok) [ns_server:debug,2014-08-19T16:50:40.228,ns_1@10.242.238.90:<0.24587.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:40.230,ns_1@10.242.238.90:<0.24590.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 664 [ns_server:info,2014-08-19T16:50:40.235,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 408 state to replica [ns_server:info,2014-08-19T16:50:40.239,ns_1@10.242.238.90:<0.24593.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 408 to state replica [ns_server:debug,2014-08-19T16:50:40.266,ns_1@10.242.238.90:<0.24593.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_408_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:40.267,ns_1@10.242.238.90:<0.24593.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[408]}, {checkpoints,[{408,0}]}, {name,<<"replication_building_408_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[408]}, {takeover,false}, {suffix,"building_408_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",408,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:40.268,ns_1@10.242.238.90:<0.24593.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24608.0> [rebalance:debug,2014-08-19T16:50:40.268,ns_1@10.242.238.90:<0.24593.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:50:40.269,ns_1@10.242.238.90:<0.24593.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.11305.1>,#Ref<16550.0.1.165804>}]} [rebalance:info,2014-08-19T16:50:40.269,ns_1@10.242.238.90:<0.24593.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 408 [rebalance:debug,2014-08-19T16:50:40.269,ns_1@10.242.238.90:<0.24593.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.11305.1>,#Ref<16550.0.1.165804>}] [ns_server:debug,2014-08-19T16:50:40.270,ns_1@10.242.238.90:<0.24593.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:40.285,ns_1@10.242.238.90:<0.24609.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 408 [ns_server:debug,2014-08-19T16:50:40.306,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 418. Nacking mccouch update. [views:debug,2014-08-19T16:50:40.306,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/418. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:40.306,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",418,replica,0} [ns_server:debug,2014-08-19T16:50:40.306,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,945,762,698,1009,996,749,685, 983,736,672,425,970,938,755,723,691,1002,989,957,742,710,678,1021,976,944, 761,729,697,418,1008,995,963,748,716,684,982,950,767,735,703,424,1014,969, 754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696,1007,994,962, 747,715,683,981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955, 740,708,676,1019,974,942,759,727,695,1006,993,961,746,714,682,980,948,765, 733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941,758, 726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998,966,751, 719,687,985,953,738,706,674,1017,972,940,757,725,693,1004,991,959,744,712, 680,1023,978,946,763,731,699,420,1010,965,718,952,705,1016,939,756,692,1003, 990,743,679,977,730,419,964,717,951,704,1015] [ns_server:info,2014-08-19T16:50:40.345,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 663 state to replica [ns_server:info,2014-08-19T16:50:40.352,ns_1@10.242.238.90:<0.24612.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 663 to state replica [views:debug,2014-08-19T16:50:40.364,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/418. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:40.365,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",418,replica,0} [ns_server:debug,2014-08-19T16:50:40.390,ns_1@10.242.238.90:<0.24612.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_663_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:40.391,ns_1@10.242.238.90:<0.24612.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[663]}, {checkpoints,[{663,0}]}, {name,<<"replication_building_663_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[663]}, {takeover,false}, {suffix,"building_663_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",663,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:40.392,ns_1@10.242.238.90:<0.24612.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24613.0> [rebalance:debug,2014-08-19T16:50:40.392,ns_1@10.242.238.90:<0.24612.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:40.393,ns_1@10.242.238.90:<0.24612.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.11354.1>,#Ref<16550.0.1.166063>}]} [rebalance:info,2014-08-19T16:50:40.393,ns_1@10.242.238.90:<0.24612.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 663 [rebalance:debug,2014-08-19T16:50:40.393,ns_1@10.242.238.90:<0.24612.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.11354.1>,#Ref<16550.0.1.166063>}] [ns_server:debug,2014-08-19T16:50:40.394,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24614.0> (ok) [ns_server:debug,2014-08-19T16:50:40.394,ns_1@10.242.238.90:<0.24612.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:40.396,ns_1@10.242.238.90:<0.24615.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 663 [ns_server:info,2014-08-19T16:50:40.401,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 407 state to replica [ns_server:info,2014-08-19T16:50:40.404,ns_1@10.242.238.90:<0.24632.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 407 to state replica [ns_server:debug,2014-08-19T16:50:40.431,ns_1@10.242.238.90:<0.24632.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_407_'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:50:40.431,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 416. Nacking mccouch update. [views:debug,2014-08-19T16:50:40.431,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/416. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:40.431,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",416,replica,0} [ns_server:debug,2014-08-19T16:50:40.432,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,945,762,698,1009,996,749,685, 983,736,672,425,970,938,755,723,691,1002,989,957,742,710,678,1021,976,944, 761,729,697,418,1008,995,963,748,716,684,982,950,767,735,703,424,1014,969, 754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696,1007,994,962, 747,715,683,981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955, 740,708,676,1019,974,942,759,727,695,416,1006,993,961,746,714,682,980,948, 765,733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941, 758,726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998,966, 751,719,687,985,953,738,706,674,1017,972,940,757,725,693,1004,991,959,744, 712,680,1023,978,946,763,731,699,420,1010,965,718,952,705,1016,939,756,692, 1003,990,743,679,977,730,419,964,717,951,704,1015] [rebalance:info,2014-08-19T16:50:40.432,ns_1@10.242.238.90:<0.24632.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[407]}, {checkpoints,[{407,0}]}, {name,<<"replication_building_407_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[407]}, {takeover,false}, {suffix,"building_407_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",407,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:40.433,ns_1@10.242.238.90:<0.24632.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24633.0> [rebalance:debug,2014-08-19T16:50:40.433,ns_1@10.242.238.90:<0.24632.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:40.434,ns_1@10.242.238.90:<0.24632.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.11382.1>,#Ref<16550.0.1.166177>}]} [rebalance:info,2014-08-19T16:50:40.434,ns_1@10.242.238.90:<0.24632.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 407 [rebalance:debug,2014-08-19T16:50:40.434,ns_1@10.242.238.90:<0.24632.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.11382.1>,#Ref<16550.0.1.166177>}] [ns_server:debug,2014-08-19T16:50:40.435,ns_1@10.242.238.90:<0.24632.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:40.451,ns_1@10.242.238.90:<0.24634.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 407 [views:debug,2014-08-19T16:50:40.465,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/416. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:40.466,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",416,replica,0} [ns_server:info,2014-08-19T16:50:40.511,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 662 state to replica [ns_server:info,2014-08-19T16:50:40.518,ns_1@10.242.238.90:<0.24651.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 662 to state replica [ns_server:debug,2014-08-19T16:50:40.532,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 414. Nacking mccouch update. [views:debug,2014-08-19T16:50:40.532,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/414. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:40.532,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",414,replica,0} [ns_server:debug,2014-08-19T16:50:40.533,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,426,971,724,958,711,1022,945,762,698,1009,996,749,685, 983,736,672,425,970,938,755,723,691,1002,989,957,742,710,678,1021,976,944, 761,729,697,418,1008,995,963,748,716,684,982,950,767,735,703,424,1014,969, 754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696,1007,994,962, 747,715,683,981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955, 740,708,676,1019,974,942,759,727,695,416,1006,993,961,746,714,682,980,948, 765,733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941, 758,726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998,966, 751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991,959, 744,712,680,1023,978,946,763,731,699,420,1010,965,718,952,705,1016,939,756, 692,1003,990,743,679,977,730,419,964,717,951,704,1015] [ns_server:debug,2014-08-19T16:50:40.557,ns_1@10.242.238.90:<0.24651.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_662_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:40.559,ns_1@10.242.238.90:<0.24651.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[662]}, {checkpoints,[{662,0}]}, {name,<<"replication_building_662_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[662]}, {takeover,false}, {suffix,"building_662_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",662,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:40.559,ns_1@10.242.238.90:<0.24651.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24652.0> [rebalance:debug,2014-08-19T16:50:40.559,ns_1@10.242.238.90:<0.24651.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:40.560,ns_1@10.242.238.90:<0.24651.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.11423.1>,#Ref<16550.0.1.166413>}]} [rebalance:info,2014-08-19T16:50:40.560,ns_1@10.242.238.90:<0.24651.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 662 [rebalance:debug,2014-08-19T16:50:40.560,ns_1@10.242.238.90:<0.24651.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to 
[{<16550.11423.1>,#Ref<16550.0.1.166413>}] [ns_server:debug,2014-08-19T16:50:40.561,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24653.0> (ok) [ns_server:debug,2014-08-19T16:50:40.562,ns_1@10.242.238.90:<0.24651.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:40.563,ns_1@10.242.238.90:<0.24654.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 662 [views:debug,2014-08-19T16:50:40.566,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/414. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:40.566,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",414,replica,0} [ns_server:info,2014-08-19T16:50:40.567,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 406 state to replica [ns_server:info,2014-08-19T16:50:40.571,ns_1@10.242.238.90:<0.24657.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 406 to state replica [ns_server:debug,2014-08-19T16:50:40.598,ns_1@10.242.238.90:<0.24657.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_406_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:40.600,ns_1@10.242.238.90:<0.24657.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[406]}, {checkpoints,[{406,0}]}, {name,<<"replication_building_406_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[406]}, {takeover,false}, {suffix,"building_406_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",406,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:40.601,ns_1@10.242.238.90:<0.24657.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.24658.0> [rebalance:debug,2014-08-19T16:50:40.601,ns_1@10.242.238.90:<0.24657.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:40.601,ns_1@10.242.238.90:<0.24657.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.11445.1>,#Ref<16550.0.1.166509>}]} [rebalance:info,2014-08-19T16:50:40.602,ns_1@10.242.238.90:<0.24657.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 406 [rebalance:debug,2014-08-19T16:50:40.602,ns_1@10.242.238.90:<0.24657.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.11445.1>,#Ref<16550.0.1.166509>}] [ns_server:debug,2014-08-19T16:50:40.603,ns_1@10.242.238.90:<0.24657.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:40.617,ns_1@10.242.238.90:<0.24659.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 406 [ns_server:debug,2014-08-19T16:50:40.687,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 673. Nacking mccouch update. [views:debug,2014-08-19T16:50:40.687,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/673. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:40.688,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",673,pending,0} [ns_server:debug,2014-08-19T16:50:40.688,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,958,711,1022,945,762,698,1009,996,749, 685,983,736,672,425,970,938,755,723,691,1002,989,957,742,710,678,1021,976, 944,761,729,697,418,1008,995,963,748,716,684,982,950,767,735,703,424,1014, 969,754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696,1007,994, 962,747,715,683,981,949,766,734,702,670,423,1013,968,753,721,689,1000,987, 955,740,708,676,1019,974,942,759,727,695,416,1006,993,961,746,714,682,980, 948,765,733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973, 941,758,726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998, 966,751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991, 959,744,712,680,1023,978,946,763,731,699,420,1010,965,718,952,705,1016,939, 756,692,1003,990,743,679,977,730,419,964,717,951,704,1015] [views:debug,2014-08-19T16:50:40.754,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/673. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:40.755,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",673,pending,0} [ns_server:debug,2014-08-19T16:50:40.896,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 671. Nacking mccouch update. [views:debug,2014-08-19T16:50:40.896,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/671. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:40.897,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",671,pending,0} [ns_server:debug,2014-08-19T16:50:40.897,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,958,711,1022,945,762,698,1009,996,749, 685,983,736,672,425,970,938,755,723,691,1002,989,957,742,710,678,1021,976, 944,761,729,697,418,1008,995,963,748,716,684,982,950,767,735,703,671,424, 1014,969,754,722,690,1001,988,956,741,709,677,1020,975,943,760,728,696,1007, 994,962,747,715,683,981,949,766,734,702,670,423,1013,968,753,721,689,1000, 987,955,740,708,676,1019,974,942,759,727,695,416,1006,993,961,746,714,682, 980,948,765,733,701,422,1012,999,967,752,720,688,986,954,739,707,675,1018, 973,941,758,726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011, 998,966,751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004, 991,959,744,712,680,1023,978,946,763,731,699,420,1010,965,718,952,705,1016, 939,756,692,1003,990,743,679,977,730,419,964,717,951,704,1015] [views:debug,2014-08-19T16:50:40.964,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/671. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:40.964,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",671,pending,0} [ns_server:debug,2014-08-19T16:50:41.106,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 669. Nacking mccouch update. [views:debug,2014-08-19T16:50:41.106,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/669. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:41.106,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",669,pending,0} [ns_server:debug,2014-08-19T16:50:41.106,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,958,711,1022,945,762,698,1009,996,749, 685,983,736,672,425,970,723,989,957,742,710,678,1021,976,944,761,729,697,418, 1008,995,963,748,716,684,982,950,767,735,703,671,424,1014,969,754,722,690, 1001,988,956,741,709,677,1020,975,943,760,728,696,1007,994,962,747,715,683, 981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955,740,708,676, 1019,974,942,759,727,695,416,1006,993,961,746,714,682,980,948,765,733,701, 669,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941,758,726, 694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998,966,751,719, 687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991,959,744,712, 680,1023,978,946,763,731,699,420,1010,965,718,952,705,1016,939,756,692,1003, 990,743,679,977,730,419,964,717,951,704,1015,938,755,691,1002] [views:debug,2014-08-19T16:50:41.173,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/669. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:41.173,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",669,pending,0} [ns_server:debug,2014-08-19T16:50:41.315,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 667. Nacking mccouch update. [views:debug,2014-08-19T16:50:41.315,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/667. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:41.315,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",667,pending,0} [ns_server:debug,2014-08-19T16:50:41.315,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,958,711,1022,945,762,698,1009,996,749, 685,983,736,672,425,970,723,989,957,742,710,678,1021,976,944,761,729,697,418, 1008,995,963,748,716,684,982,950,767,735,703,671,424,1014,969,754,722,690, 1001,988,956,741,709,677,1020,975,943,760,728,696,1007,994,962,747,715,683, 981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955,740,708,676, 1019,974,942,759,727,695,416,1006,993,961,746,714,682,980,948,765,733,701, 669,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941,758,726, 694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998,966,751,719, 687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991,959,744,712, 680,1023,978,946,763,731,699,667,420,1010,965,718,952,705,1016,939,756,692, 1003,990,743,679,977,730,419,964,717,951,704,1015,938,755,691,1002] [views:debug,2014-08-19T16:50:41.382,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/667. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:41.382,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",667,pending,0} [ns_server:debug,2014-08-19T16:50:41.525,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 665. Nacking mccouch update. [views:debug,2014-08-19T16:50:41.525,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/665. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:41.525,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",665,pending,0} [ns_server:debug,2014-08-19T16:50:41.525,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,958,711,1022,945,762,698,1009,996,749, 685,983,736,672,425,970,723,989,957,742,710,678,1021,976,944,761,729,697,665, 418,1008,995,963,748,716,684,982,950,767,735,703,671,424,1014,969,754,722, 690,1001,988,956,741,709,677,1020,975,943,760,728,696,1007,994,962,747,715, 683,981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955,740,708, 676,1019,974,942,759,727,695,416,1006,993,961,746,714,682,980,948,765,733, 701,669,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941,758, 726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998,966,751, 719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991,959,744, 712,680,1023,978,946,763,731,699,667,420,1010,965,718,952,705,1016,939,756, 692,1003,990,743,679,977,730,419,964,717,951,704,1015,938,755,691,1002] [views:debug,2014-08-19T16:50:41.583,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/665. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:41.584,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",665,pending,0} [ns_server:debug,2014-08-19T16:50:41.675,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 663. Nacking mccouch update. [views:debug,2014-08-19T16:50:41.675,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/663. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:41.676,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",663,pending,0} [ns_server:debug,2014-08-19T16:50:41.676,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,958,711,1022,945,762,698,1009,996,749, 685,983,736,672,425,970,723,989,957,742,710,678,1021,976,944,761,729,697,665, 418,1008,995,963,748,716,684,982,950,767,735,703,671,424,1014,969,754,722, 690,1001,988,956,741,709,677,1020,975,943,760,728,696,1007,994,962,747,715, 683,981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955,740,708, 676,1019,974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765, 733,701,669,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941, 758,726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998,966, 751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991,959, 744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,952,705,1016,939, 756,692,1003,990,743,679,977,730,419,964,717,951,704,1015,938,755,691,1002] [views:debug,2014-08-19T16:50:41.726,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/663. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:41.726,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",663,pending,0} [ns_server:debug,2014-08-19T16:50:41.793,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 417. Nacking mccouch update. [views:debug,2014-08-19T16:50:41.793,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/417. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:41.793,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",417,replica,0} [ns_server:debug,2014-08-19T16:50:41.793,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,958,711,1022,945,762,698,1009,996,749, 685,983,736,672,425,970,723,989,957,742,710,678,1021,976,944,761,729,697,665, 418,1008,995,963,748,716,684,982,950,767,735,703,671,424,1014,969,754,722, 690,1001,988,956,741,709,677,1020,975,943,760,728,696,417,1007,994,962,747, 715,683,981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955,740, 708,676,1019,974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948, 765,733,701,669,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973, 941,758,726,694,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998, 966,751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991, 959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,952,705,1016, 939,756,692,1003,990,743,679,977,730,419,964,717,951,704,1015,938,755,691, 1002] [views:debug,2014-08-19T16:50:41.844,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/417. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:41.844,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",417,replica,0} [ns_server:debug,2014-08-19T16:50:41.985,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 415. Nacking mccouch update. [views:debug,2014-08-19T16:50:41.986,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/415. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:41.986,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",415,replica,0} [ns_server:debug,2014-08-19T16:50:41.986,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,958,711,1022,945,762,698,1009,996,749, 685,983,736,672,425,970,723,957,710,1021,976,944,761,729,697,665,418,1008, 995,963,748,716,684,982,950,767,735,703,671,424,1014,969,754,722,690,1001, 988,956,741,709,677,1020,975,943,760,728,696,417,1007,994,962,747,715,683, 981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955,740,708,676, 1019,974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765,733, 701,669,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941,758, 726,694,415,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998,966, 751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991,959, 744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,952,705,1016,939, 756,692,1003,990,743,679,977,730,419,964,717,951,704,1015,938,755,691,1002, 989,742,678] [views:debug,2014-08-19T16:50:42.037,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/415. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:42.037,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",415,replica,0} [ns_server:debug,2014-08-19T16:50:42.104,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 413. Nacking mccouch update. [views:debug,2014-08-19T16:50:42.104,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/413. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:42.104,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",413,replica,0} [ns_server:debug,2014-08-19T16:50:42.105,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,957,710,1021,976,944,761,729,697,665,418, 1008,995,963,748,716,684,982,950,767,735,703,671,424,1014,969,754,722,690, 1001,988,956,741,709,677,1020,975,943,760,728,696,417,1007,994,962,747,715, 683,981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955,740,708, 676,1019,974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765, 733,701,669,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973,941, 758,726,694,415,1005,992,960,745,713,681,979,947,764,732,700,421,1011,998, 966,751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991, 959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,952,705,1016, 939,756,692,1003,990,743,679,977,730,419,964,717,951,704,1015,938,755,691, 1002,989,742,678] [views:debug,2014-08-19T16:50:42.166,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/413. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:42.166,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",413,replica,0} [ns_server:debug,2014-08-19T16:50:42.308,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 411. Nacking mccouch update. [views:debug,2014-08-19T16:50:42.308,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/411. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:42.308,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",411,replica,0} [ns_server:debug,2014-08-19T16:50:42.309,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,957,710,1021,976,944,761,729,697,665,418, 1008,995,963,748,716,684,982,950,767,735,703,671,424,1014,969,754,722,690, 411,1001,988,956,741,709,677,1020,975,943,760,728,696,417,1007,994,962,747, 715,683,981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955,740, 708,676,1019,974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948, 765,733,701,669,422,1012,999,967,752,720,688,986,954,739,707,675,1018,973, 941,758,726,694,415,1005,992,960,745,713,681,979,947,764,732,700,421,1011, 998,966,751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004, 991,959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,952,705, 1016,939,756,692,1003,990,743,679,977,730,419,964,717,951,704,1015,938,755, 691,1002,989,742,678] [views:debug,2014-08-19T16:50:42.384,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/411. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:42.384,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",411,replica,0} [ns_server:debug,2014-08-19T16:50:42.542,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 409. Nacking mccouch update. [views:debug,2014-08-19T16:50:42.542,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/409. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:42.542,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",409,replica,0} [ns_server:debug,2014-08-19T16:50:42.543,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,957,710,1021,976,944,761,729,697,665,418, 1008,995,963,748,716,684,982,950,767,735,703,671,424,1014,969,754,722,690, 411,1001,988,956,741,709,677,1020,975,943,760,728,696,417,1007,994,962,747, 715,683,981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955,740, 708,676,1019,974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948, 765,733,701,669,422,1012,999,967,752,720,688,409,986,954,739,707,675,1018, 973,941,758,726,694,415,1005,992,960,745,713,681,979,947,764,732,700,421, 1011,998,966,751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414, 1004,991,959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,952, 705,1016,939,756,692,1003,990,743,679,977,730,419,964,717,951,704,1015,938, 755,691,1002,989,742,678] [views:debug,2014-08-19T16:50:42.618,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/409. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:42.618,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",409,replica,0} [ns_server:debug,2014-08-19T16:50:42.785,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 407. Nacking mccouch update. [views:debug,2014-08-19T16:50:42.785,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/407. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:42.785,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",407,replica,0} [ns_server:debug,2014-08-19T16:50:42.785,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,957,710,1021,976,944,761,729,697,665,418, 1008,995,963,748,716,684,982,950,767,735,703,671,424,1014,969,754,722,690, 411,1001,988,956,741,709,677,1020,975,943,760,728,696,417,1007,994,962,747, 715,683,981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955,740, 708,676,1019,974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948, 765,733,701,669,422,1012,999,967,752,720,688,409,986,954,739,707,675,1018, 973,941,758,726,694,415,1005,992,960,745,713,681,979,947,764,732,700,421, 1011,998,966,751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414, 1004,991,959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407, 952,705,1016,939,756,692,1003,990,743,679,977,730,419,964,717,951,704,1015, 938,755,691,1002,989,742,678] [views:debug,2014-08-19T16:50:42.860,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/407. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:42.860,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",407,replica,0} [ns_server:debug,2014-08-19T16:50:43.002,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 668. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.002,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/668. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:43.002,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",668,pending,0} [ns_server:debug,2014-08-19T16:50:43.003,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,957,710,1021,944,761,697,1008,995,963,748, 716,684,982,950,767,735,703,671,424,1014,969,754,722,690,411,1001,988,956, 741,709,677,1020,975,943,760,728,696,417,1007,994,962,747,715,683,981,949, 766,734,702,670,423,1013,968,753,721,689,1000,987,955,740,708,676,1019,974, 942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765,733,701,669,422, 1012,999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726,694, 415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998,966,751, 719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991,959,744, 712,680,1023,978,946,763,731,699,667,420,1010,965,718,407,952,705,1016,939, 756,692,1003,990,743,679,977,730,419,964,717,951,704,1015,938,755,691,1002, 989,742,678,976,729,665,418] [views:debug,2014-08-19T16:50:43.070,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/668. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:43.070,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",668,pending,0} [ns_server:debug,2014-08-19T16:50:43.163,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 666. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.163,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/666. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:43.163,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",666,pending,0} [ns_server:debug,2014-08-19T16:50:43.164,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,957,710,1021,944,761,697,1008,995,963,748, 716,684,982,950,767,735,703,671,424,1014,969,754,722,690,411,1001,988,956, 741,709,677,1020,975,943,760,728,696,417,1007,994,962,747,715,683,981,949, 766,734,702,670,423,1013,968,753,721,689,1000,987,955,740,708,676,1019,974, 942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765,733,701,669,422, 1012,999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726,694, 415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998,966,751, 719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991,959,744, 712,680,1023,978,946,763,731,699,667,420,1010,965,718,407,952,705,1016,939, 756,692,1003,990,743,679,977,730,666,419,964,717,951,704,1015,938,755,691, 1002,989,742,678,976,729,665,418] [views:debug,2014-08-19T16:50:43.197,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/666. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:43.197,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",666,pending,0} [ns_server:debug,2014-08-19T16:50:43.289,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 664. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.289,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/664. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:43.289,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",664,pending,0} [ns_server:debug,2014-08-19T16:50:43.290,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,957,710,1021,944,761,697,1008,995,963,748, 716,684,982,950,767,735,703,671,424,1014,969,754,722,690,411,1001,988,956, 741,709,677,1020,975,943,760,728,696,664,417,1007,994,962,747,715,683,981, 949,766,734,702,670,423,1013,968,753,721,689,1000,987,955,740,708,676,1019, 974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765,733,701,669, 422,1012,999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726, 694,415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998,966, 751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991,959, 744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407,952,705,1016, 939,756,692,1003,990,743,679,977,730,666,419,964,717,951,704,1015,938,755, 691,1002,989,742,678,976,729,665,418] [views:debug,2014-08-19T16:50:43.323,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/664. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:43.323,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",664,pending,0} [ns_server:debug,2014-08-19T16:50:43.406,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 662. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.407,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/662. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:43.407,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",662,pending,0} [ns_server:debug,2014-08-19T16:50:43.407,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,957,710,1021,944,761,697,1008,995,963,748, 716,684,982,950,767,735,703,671,424,1014,969,754,722,690,411,1001,988,956, 741,709,677,1020,975,943,760,728,696,664,417,1007,994,962,747,715,683,981, 949,766,734,702,670,423,1013,968,753,721,689,1000,987,955,740,708,676,1019, 974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765,733,701,669, 422,1012,999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726, 694,662,415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998, 966,751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991, 959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407,952,705, 1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,951,704,1015,938, 755,691,1002,989,742,678,976,729,665,418] [views:debug,2014-08-19T16:50:43.441,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/662. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:43.441,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",662,pending,0} [ns_server:debug,2014-08-19T16:50:43.524,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 412. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.524,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/412. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:43.524,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",412,replica,0} [ns_server:debug,2014-08-19T16:50:43.525,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,412,957,710,1021,944,761,697,1008,995,963, 748,716,684,982,950,767,735,703,671,424,1014,969,754,722,690,411,1001,988, 956,741,709,677,1020,975,943,760,728,696,664,417,1007,994,962,747,715,683, 981,949,766,734,702,670,423,1013,968,753,721,689,1000,987,955,740,708,676, 1019,974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765,733, 701,669,422,1012,999,967,752,720,688,409,986,954,739,707,675,1018,973,941, 758,726,694,662,415,1005,992,960,745,713,681,979,947,764,732,700,668,421, 1011,998,966,751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414, 1004,991,959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407, 952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,951,704, 1015,938,755,691,1002,989,742,678,976,729,665,418] [views:debug,2014-08-19T16:50:43.558,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/412. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:43.558,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",412,replica,0} [ns_server:debug,2014-08-19T16:50:43.694,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 410. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.695,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/410. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:43.695,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",410,replica,0} [ns_server:debug,2014-08-19T16:50:43.695,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,412,957,710,1021,944,761,697,1008,995,748, 684,982,950,767,735,703,671,424,1014,969,754,722,690,411,1001,988,956,741, 709,677,1020,975,943,760,728,696,664,417,1007,994,962,747,715,683,981,949, 766,734,702,670,423,1013,968,753,721,689,410,1000,987,955,740,708,676,1019, 974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765,733,701,669, 422,1012,999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726, 694,662,415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998, 966,751,719,687,985,953,738,706,674,1017,972,940,757,725,693,414,1004,991, 959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407,952,705, 1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,951,704,1015,938, 755,691,1002,989,742,678,976,729,665,418,963,716] [views:debug,2014-08-19T16:50:43.746,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/410. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:43.746,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",410,replica,0} [ns_server:debug,2014-08-19T16:50:43.871,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 408. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.872,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/408. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:43.872,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",408,replica,0} [ns_server:debug,2014-08-19T16:50:43.872,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,412,957,710,1021,944,761,697,1008,995,748, 684,982,950,767,735,703,671,424,1014,969,754,722,690,411,1001,988,956,741, 709,677,1020,975,943,760,728,696,664,417,1007,994,962,747,715,683,981,949, 766,734,702,670,423,1013,968,753,721,689,410,1000,987,955,740,708,676,1019, 974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765,733,701,669, 422,1012,999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726, 694,662,415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998, 966,751,719,687,408,985,953,738,706,674,1017,972,940,757,725,693,414,1004, 991,959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407,952, 705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,951,704,1015, 938,755,691,1002,989,742,678,976,729,665,418,963,716] [views:debug,2014-08-19T16:50:43.905,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/408. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:43.906,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",408,replica,0} [ns_server:debug,2014-08-19T16:50:43.989,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 406. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.989,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/406. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:43.989,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",406,replica,0} [ns_server:debug,2014-08-19T16:50:43.990,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,412,957,710,1021,944,761,697,1008,995,748, 684,982,950,767,735,703,671,424,1014,969,754,722,690,411,1001,988,956,741, 709,677,1020,975,943,760,728,696,664,417,1007,994,962,747,715,683,981,949, 766,734,702,670,423,1013,968,753,721,689,410,1000,987,955,740,708,676,1019, 974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765,733,701,669, 422,1012,999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726, 694,662,415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998, 966,751,719,687,408,985,953,738,706,674,1017,972,940,757,725,693,414,1004, 991,959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407,952, 705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,406,951,704, 1015,938,755,691,1002,989,742,678,976,729,665,418,963,716] [views:debug,2014-08-19T16:50:44.040,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/406. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:44.040,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",406,replica,0} [rebalance:debug,2014-08-19T16:50:44.044,ns_1@10.242.238.90:<0.24659.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:44.044,ns_1@10.242.238.90:<0.24634.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.044,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24659.0> (ok) [ns_server:debug,2014-08-19T16:50:44.044,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24634.0> (ok) [rebalance:debug,2014-08-19T16:50:44.140,ns_1@10.242.238.90:<0.24609.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.140,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24609.0> (ok) [rebalance:debug,2014-08-19T16:50:44.140,ns_1@10.242.238.90:<0.24584.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.140,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24584.0> (ok) [rebalance:debug,2014-08-19T16:50:44.224,ns_1@10.242.238.90:<0.24545.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:44.224,ns_1@10.242.238.90:<0.24520.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.224,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24545.0> (ok) [ns_server:debug,2014-08-19T16:50:44.224,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24520.0> (ok) [rebalance:debug,2014-08-19T16:50:44.342,ns_1@10.242.238.90:<0.24495.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:44.342,ns_1@10.242.238.90:<0.24463.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.342,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24495.0> (ok) [ns_server:debug,2014-08-19T16:50:44.342,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24463.0> (ok) [rebalance:debug,2014-08-19T16:50:44.468,ns_1@10.242.238.90:<0.24452.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.468,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24452.0> (ok) [rebalance:debug,2014-08-19T16:50:44.468,ns_1@10.242.238.90:<0.24413.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.468,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24413.0> (ok) [rebalance:debug,2014-08-19T16:50:44.534,ns_1@10.242.238.90:<0.24363.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:44.534,ns_1@10.242.238.90:<0.24388.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.535,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24363.0> (ok) [ns_server:debug,2014-08-19T16:50:44.535,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from 
subprocess: <0.24388.0> (ok) [rebalance:debug,2014-08-19T16:50:44.618,ns_1@10.242.238.90:<0.24324.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:44.618,ns_1@10.242.238.90:<0.24294.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.618,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24324.0> (ok) [ns_server:debug,2014-08-19T16:50:44.618,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24294.0> (ok) [rebalance:debug,2014-08-19T16:50:44.685,ns_1@10.242.238.90:<0.24261.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:44.685,ns_1@10.242.238.90:<0.24236.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.685,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24261.0> (ok) [ns_server:debug,2014-08-19T16:50:44.686,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24236.0> (ok) [rebalance:debug,2014-08-19T16:50:44.769,ns_1@10.242.238.90:<0.24211.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.769,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24211.0> (ok) [rebalance:debug,2014-08-19T16:50:44.769,ns_1@10.242.238.90:<0.24185.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.769,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24185.0> (ok) [rebalance:debug,2014-08-19T16:50:44.861,ns_1@10.242.238.90:<0.24174.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:44.861,ns_1@10.242.238.90:<0.24140.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.861,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24174.0> (ok) [ns_server:debug,2014-08-19T16:50:44.861,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24140.0> (ok) [rebalance:debug,2014-08-19T16:50:44.979,ns_1@10.242.238.90:<0.24615.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:44.979,ns_1@10.242.238.90:<0.24143.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:44.979,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24615.0> (ok) [ns_server:debug,2014-08-19T16:50:44.979,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24143.0> (ok) [rebalance:debug,2014-08-19T16:50:45.012,ns_1@10.242.238.90:<0.24654.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:45.012,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24654.0> (ok) [rebalance:debug,2014-08-19T16:50:45.012,ns_1@10.242.238.90:<0.24565.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:45.012,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24565.0> (ok) [rebalance:debug,2014-08-19T16:50:45.142,ns_1@10.242.238.90:<0.24515.0>:janitor_agent:handle_call:795]Done 
[rebalance:debug,2014-08-19T16:50:45.142,ns_1@10.242.238.90:<0.24590.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:45.142,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24515.0> (ok) [ns_server:debug,2014-08-19T16:50:45.142,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24590.0> (ok) [rebalance:debug,2014-08-19T16:50:45.284,ns_1@10.242.238.90:<0.24458.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:45.284,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24458.0> (ok) [rebalance:debug,2014-08-19T16:50:45.284,ns_1@10.242.238.90:<0.24540.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:45.284,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24540.0> (ok) [rebalance:debug,2014-08-19T16:50:45.418,ns_1@10.242.238.90:<0.24408.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:45.418,ns_1@10.242.238.90:<0.24490.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:45.418,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24408.0> (ok) [ns_server:debug,2014-08-19T16:50:45.418,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24490.0> (ok) [rebalance:debug,2014-08-19T16:50:45.534,ns_1@10.242.238.90:<0.24433.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:45.535,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24433.0> (ok) [rebalance:debug,2014-08-19T16:50:45.535,ns_1@10.242.238.90:<0.24344.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:45.535,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24344.0> (ok) [rebalance:debug,2014-08-19T16:50:45.635,ns_1@10.242.238.90:<0.24369.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:45.635,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24369.0> (ok) [rebalance:debug,2014-08-19T16:50:45.635,ns_1@10.242.238.90:<0.24281.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:45.635,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24281.0> (ok) [rebalance:debug,2014-08-19T16:50:45.735,ns_1@10.242.238.90:<0.24319.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:45.735,ns_1@10.242.238.90:<0.24231.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:45.735,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24319.0> (ok) [ns_server:debug,2014-08-19T16:50:45.736,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24231.0> (ok) [rebalance:debug,2014-08-19T16:50:45.802,ns_1@10.242.238.90:<0.24256.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:50:45.803,ns_1@10.242.238.90:<0.24180.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:50:45.803,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24256.0> (ok) [ns_server:debug,2014-08-19T16:50:45.803,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24180.0> (ok) [rebalance:debug,2014-08-19T16:50:45.912,ns_1@10.242.238.90:<0.24206.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:45.912,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24206.0> (ok) [rebalance:debug,2014-08-19T16:50:45.989,ns_1@10.242.238.90:<0.24155.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:45.989,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24155.0> (ok) [rebalance:debug,2014-08-19T16:50:46.047,ns_1@10.242.238.90:<0.24316.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:46.047,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24316.0> (ok) [ns_server:debug,2014-08-19T16:50:46.048,ns_1@10.242.238.90:<0.24974.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 682) [ns_server:debug,2014-08-19T16:50:46.048,ns_1@10.242.238.90:<0.24974.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:46.049,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24973.0> (ok) [rebalance:debug,2014-08-19T16:50:46.049,ns_1@10.242.238.90:<0.24093.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:46.049,ns_1@10.242.238.90:<0.24093.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:46.050,ns_1@10.242.238.90:<0.24975.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:46.050,ns_1@10.242.238.90:<0.24975.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:46.050,ns_1@10.242.238.90:<0.24093.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:46.107,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 682 state to active [views:debug,2014-08-19T16:50:46.139,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/682. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:46.139,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",682,active,1} [ns_server:debug,2014-08-19T16:50:46.143,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:46.146,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3472 us [ns_server:debug,2014-08-19T16:50:46.147,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:46.147,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:46.147,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{682, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:47.634,ns_1@10.242.238.90:<0.24983.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 407 [rebalance:debug,2014-08-19T16:50:47.635,ns_1@10.242.238.90:<0.24983.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:47.636,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24983.0> (ok) [rebalance:debug,2014-08-19T16:50:47.707,ns_1@10.242.238.90:<0.24986.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 409 [rebalance:debug,2014-08-19T16:50:47.709,ns_1@10.242.238.90:<0.24986.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:47.709,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24986.0> (ok) [rebalance:debug,2014-08-19T16:50:47.790,ns_1@10.242.238.90:<0.24990.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 411 [rebalance:debug,2014-08-19T16:50:47.790,ns_1@10.242.238.90:<0.24993.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 406 [rebalance:debug,2014-08-19T16:50:47.791,ns_1@10.242.238.90:<0.24993.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:47.791,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24993.0> (ok) [rebalance:debug,2014-08-19T16:50:47.792,ns_1@10.242.238.90:<0.24990.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:47.792,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24990.0> (ok) [rebalance:debug,2014-08-19T16:50:47.902,ns_1@10.242.238.90:<0.24996.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 413 [rebalance:debug,2014-08-19T16:50:47.902,ns_1@10.242.238.90:<0.24999.0>:janitor_agent:handle_call:793]Going 
to wait for persistence of checkpoint 1 in vbucket 408 [rebalance:debug,2014-08-19T16:50:47.903,ns_1@10.242.238.90:<0.24999.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:47.903,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24999.0> (ok) [rebalance:debug,2014-08-19T16:50:47.904,ns_1@10.242.238.90:<0.24996.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:47.904,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.24996.0> (ok) [rebalance:debug,2014-08-19T16:50:47.969,ns_1@10.242.238.90:<0.25002.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 410 [rebalance:debug,2014-08-19T16:50:47.969,ns_1@10.242.238.90:<0.25005.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 415 [rebalance:debug,2014-08-19T16:50:47.970,ns_1@10.242.238.90:<0.25002.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:47.970,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25002.0> (ok) [rebalance:debug,2014-08-19T16:50:47.971,ns_1@10.242.238.90:<0.25005.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:47.971,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25005.0> (ok) [rebalance:debug,2014-08-19T16:50:48.036,ns_1@10.242.238.90:<0.25008.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 417 [rebalance:debug,2014-08-19T16:50:48.036,ns_1@10.242.238.90:<0.25011.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 412 [rebalance:debug,2014-08-19T16:50:48.037,ns_1@10.242.238.90:<0.25011.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.037,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25011.0> (ok) [rebalance:debug,2014-08-19T16:50:48.038,ns_1@10.242.238.90:<0.25008.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.038,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25008.0> (ok) [rebalance:debug,2014-08-19T16:50:48.103,ns_1@10.242.238.90:<0.25014.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 419 [rebalance:debug,2014-08-19T16:50:48.103,ns_1@10.242.238.90:<0.25017.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 414 [rebalance:debug,2014-08-19T16:50:48.104,ns_1@10.242.238.90:<0.25017.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.105,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25017.0> (ok) [rebalance:debug,2014-08-19T16:50:48.105,ns_1@10.242.238.90:<0.25014.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.105,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25014.0> (ok) [rebalance:debug,2014-08-19T16:50:48.171,ns_1@10.242.238.90:<0.25020.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 416 [rebalance:debug,2014-08-19T16:50:48.171,ns_1@10.242.238.90:<0.25023.0>:janitor_agent:handle_call:793]Going to 
wait for persistence of checkpoint 1 in vbucket 421 [rebalance:debug,2014-08-19T16:50:48.172,ns_1@10.242.238.90:<0.25020.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.172,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25020.0> (ok) [rebalance:debug,2014-08-19T16:50:48.172,ns_1@10.242.238.90:<0.25023.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.172,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25023.0> (ok) [rebalance:debug,2014-08-19T16:50:48.263,ns_1@10.242.238.90:<0.25026.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 418 [rebalance:debug,2014-08-19T16:50:48.264,ns_1@10.242.238.90:<0.25029.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 423 [rebalance:debug,2014-08-19T16:50:48.265,ns_1@10.242.238.90:<0.25026.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.265,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25026.0> (ok) [rebalance:debug,2014-08-19T16:50:48.265,ns_1@10.242.238.90:<0.25029.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.265,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25029.0> (ok) [rebalance:debug,2014-08-19T16:50:48.354,ns_1@10.242.238.90:<0.25040.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 420 [rebalance:debug,2014-08-19T16:50:48.354,ns_1@10.242.238.90:<0.25043.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 425 [rebalance:debug,2014-08-19T16:50:48.355,ns_1@10.242.238.90:<0.25040.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.355,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25040.0> (ok) [rebalance:debug,2014-08-19T16:50:48.356,ns_1@10.242.238.90:<0.25043.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.356,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25043.0> (ok) [rebalance:debug,2014-08-19T16:50:48.446,ns_1@10.242.238.90:<0.25049.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 422 [rebalance:debug,2014-08-19T16:50:48.447,ns_1@10.242.238.90:<0.25052.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 663 [rebalance:debug,2014-08-19T16:50:48.448,ns_1@10.242.238.90:<0.25049.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.448,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25049.0> (ok) [rebalance:debug,2014-08-19T16:50:48.448,ns_1@10.242.238.90:<0.25052.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.448,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25052.0> (ok) [rebalance:debug,2014-08-19T16:50:48.573,ns_1@10.242.238.90:<0.25055.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 665 [rebalance:debug,2014-08-19T16:50:48.574,ns_1@10.242.238.90:<0.25058.0>:janitor_agent:handle_call:793]Going to 
wait for persistence of checkpoint 1 in vbucket 424 [rebalance:debug,2014-08-19T16:50:48.575,ns_1@10.242.238.90:<0.25058.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.575,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25058.0> (ok) [rebalance:debug,2014-08-19T16:50:48.575,ns_1@10.242.238.90:<0.25055.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.575,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25055.0> (ok) [rebalance:debug,2014-08-19T16:50:48.715,ns_1@10.242.238.90:<0.25061.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 667 [rebalance:debug,2014-08-19T16:50:48.716,ns_1@10.242.238.90:<0.25061.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.717,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25061.0> (ok) [rebalance:debug,2014-08-19T16:50:48.762,ns_1@10.242.238.90:<0.24099.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:48.762,ns_1@10.242.238.90:<0.24099.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:48.762,ns_1@10.242.238.90:<0.25064.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:48.762,ns_1@10.242.238.90:<0.25064.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:48.762,ns_1@10.242.238.90:<0.24099.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:50:48.767,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 426 state to replica [ns_server:info,2014-08-19T16:50:48.768,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:start_child:172]Starting replication from 'ns_1@10.242.238.89' for [426] [error_logger:info,2014-08-19T16:50:48.770,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25065.0>}, {name,{new_child_id,[426],'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:50:48.771,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 426 to state replica [ns_server:debug,2014-08-19T16:50:48.781,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:48.784,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:48.784,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:48.784,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2257 us [ns_server:debug,2014-08-19T16:50:48.785,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{426, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:48.786,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:48.788,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[426]}, {checkpoints,[{426,1}]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] {{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]} [rebalance:debug,2014-08-19T16:50:48.788,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25067.0> [rebalance:info,2014-08-19T16:50:48.789,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:48.799,ns_1@10.242.238.90:<0.25068.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 662 
[rebalance:debug,2014-08-19T16:50:48.799,ns_1@10.242.238.90:<0.25071.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 669 [rebalance:debug,2014-08-19T16:50:48.800,ns_1@10.242.238.90:<0.25068.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.800,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25068.0> (ok) [rebalance:debug,2014-08-19T16:50:48.801,ns_1@10.242.238.90:<0.25071.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:48.801,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25071.0> (ok) [rebalance:debug,2014-08-19T16:50:49.008,ns_1@10.242.238.90:<0.25074.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 671 [rebalance:debug,2014-08-19T16:50:49.008,ns_1@10.242.238.90:<0.25077.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 664 [rebalance:debug,2014-08-19T16:50:49.009,ns_1@10.242.238.90:<0.25077.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.009,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25077.0> (ok) [rebalance:debug,2014-08-19T16:50:49.010,ns_1@10.242.238.90:<0.25074.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.010,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25074.0> (ok) [rebalance:debug,2014-08-19T16:50:49.150,ns_1@10.242.238.90:<0.25080.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 666 [rebalance:debug,2014-08-19T16:50:49.150,ns_1@10.242.238.90:<0.25083.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 673 [rebalance:debug,2014-08-19T16:50:49.151,ns_1@10.242.238.90:<0.25080.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.151,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25080.0> (ok) [rebalance:debug,2014-08-19T16:50:49.152,ns_1@10.242.238.90:<0.25083.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.152,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25083.0> (ok) [rebalance:debug,2014-08-19T16:50:49.314,ns_1@10.242.238.90:<0.25086.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 668 [rebalance:debug,2014-08-19T16:50:49.314,ns_1@10.242.238.90:<0.25089.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 675 [rebalance:debug,2014-08-19T16:50:49.315,ns_1@10.242.238.90:<0.25086.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.315,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25086.0> (ok) [rebalance:debug,2014-08-19T16:50:49.316,ns_1@10.242.238.90:<0.25089.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.316,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25089.0> (ok) [rebalance:debug,2014-08-19T16:50:49.468,ns_1@10.242.238.90:<0.25098.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 677 
[rebalance:debug,2014-08-19T16:50:49.468,ns_1@10.242.238.90:<0.25101.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 670 [rebalance:debug,2014-08-19T16:50:49.469,ns_1@10.242.238.90:<0.25101.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.469,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25101.0> (ok) [rebalance:debug,2014-08-19T16:50:49.469,ns_1@10.242.238.90:<0.25098.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.470,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25098.0> (ok) [rebalance:debug,2014-08-19T16:50:49.610,ns_1@10.242.238.90:<0.25104.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 679 [rebalance:debug,2014-08-19T16:50:49.610,ns_1@10.242.238.90:<0.25107.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 672 [rebalance:debug,2014-08-19T16:50:49.611,ns_1@10.242.238.90:<0.25107.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.611,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25107.0> (ok) [rebalance:debug,2014-08-19T16:50:49.612,ns_1@10.242.238.90:<0.25104.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.612,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25104.0> (ok) [rebalance:debug,2014-08-19T16:50:49.714,ns_1@10.242.238.90:<0.25110.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 674 [rebalance:debug,2014-08-19T16:50:49.714,ns_1@10.242.238.90:<0.25113.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 681 [rebalance:debug,2014-08-19T16:50:49.715,ns_1@10.242.238.90:<0.25110.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.715,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25110.0> (ok) [rebalance:debug,2014-08-19T16:50:49.715,ns_1@10.242.238.90:<0.25113.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.715,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25113.0> (ok) [rebalance:debug,2014-08-19T16:50:49.806,ns_1@10.242.238.90:<0.25116.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 676 [rebalance:debug,2014-08-19T16:50:49.807,ns_1@10.242.238.90:<0.25116.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.807,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25116.0> (ok) [rebalance:debug,2014-08-19T16:50:49.891,ns_1@10.242.238.90:<0.25119.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 678 [rebalance:debug,2014-08-19T16:50:49.892,ns_1@10.242.238.90:<0.25119.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.892,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25119.0> (ok) [rebalance:debug,2014-08-19T16:50:49.957,ns_1@10.242.238.90:<0.25122.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 680 
[rebalance:debug,2014-08-19T16:50:49.958,ns_1@10.242.238.90:<0.25122.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:49.958,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25122.0> (ok) [ns_server:debug,2014-08-19T16:50:50.900,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2014-08-19T16:50:50.902,ns_1@10.242.238.90:<0.25125.0>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning up indexes for bucket `default` [ns_server:info,2014-08-19T16:50:50.903,ns_1@10.242.238.90:<0.25125.0>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:50:50.909,ns_1@10.242.238.90:<0.25128.0>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 24186, disk size is 2858820 [ns_server:debug,2014-08-19T16:50:50.909,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:50:50.909,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:50:50.922,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:50.926,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2527 us [ns_server:debug,2014-08-19T16:50:50.927,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:50.928,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:50.928,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{937, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.082,ns_1@10.242.238.90:<0.25131.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 663) [ns_server:debug,2014-08-19T16:50:51.082,ns_1@10.242.238.90:<0.25131.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.082,ns_1@10.242.238.90:<0.25133.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 665) [ns_server:debug,2014-08-19T16:50:51.082,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25130.0> (ok) [ns_server:debug,2014-08-19T16:50:51.082,ns_1@10.242.238.90:<0.25133.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired 
[ns_server:debug,2014-08-19T16:50:51.083,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25132.0> (ok) [ns_server:debug,2014-08-19T16:50:51.083,ns_1@10.242.238.90:<0.25135.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 667) [ns_server:debug,2014-08-19T16:50:51.083,ns_1@10.242.238.90:<0.25135.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.083,ns_1@10.242.238.90:<0.25137.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 669) [ns_server:debug,2014-08-19T16:50:51.083,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25134.0> (ok) [ns_server:debug,2014-08-19T16:50:51.083,ns_1@10.242.238.90:<0.25137.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:50:51.083,ns_1@10.242.238.90:<0.24612.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.083,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25136.0> (ok) [rebalance:debug,2014-08-19T16:50:51.083,ns_1@10.242.238.90:<0.24548.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.083,ns_1@10.242.238.90:<0.25139.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 662) [ns_server:debug,2014-08-19T16:50:51.083,ns_1@10.242.238.90:<0.24612.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.083,ns_1@10.242.238.90:<0.24548.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.083,ns_1@10.242.238.90:<0.25141.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25142.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.24498.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25139.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25145.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 671) [rebalance:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.24455.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25142.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25141.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25138.0> (ok) [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25145.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25147.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] 
("default", 675) [rebalance:info,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.24612.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.24498.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25148.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.24455.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25150.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25147.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25153.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 664) [rebalance:info,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.24548.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25140.0> (ok) [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25148.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25150.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.084,ns_1@10.242.238.90:<0.25153.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25154.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 666) [rebalance:info,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.24498.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25143.0> (ok) [rebalance:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.24651.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.24455.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25154.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25159.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 674) [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25144.0> (ok) [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25159.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.24651.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25160.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25161.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 670) [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25146.0> (ok) [rebalance:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.24391.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25161.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25160.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25164.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 673) [rebalance:info,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.24651.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25149.0> (ok) [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.24391.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25164.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25166.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 672) [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25167.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25151.0> (ok) [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25166.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.25167.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:50:51.085,ns_1@10.242.238.90:<0.24264.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25169.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 677) [rebalance:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.24587.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25152.0> (ok) [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25169.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.24523.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25170.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 668) [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25155.0> (ok) [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.24264.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25171.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.24391.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.24587.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25156.0> (ok) [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25170.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25172.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.24523.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25173.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 681) [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25174.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.24311.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25171.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25172.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25173.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25174.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25158.0> (ok) [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25175.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 679) [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.24311.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.24587.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.24430.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.24264.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25157.0> (ok) [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25176.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.25175.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:info,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.24523.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25177.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 676) [rebalance:debug,2014-08-19T16:50:51.086,ns_1@10.242.238.90:<0.24341.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25176.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24430.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25177.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25162.0> (ok) [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25178.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24366.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25179.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 678) [rebalance:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24228.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24341.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25179.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25178.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25180.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25163.0> (ok) [rebalance:info,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24311.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24366.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24486.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25181.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 680) [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25165.0> (ok) [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25183.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24228.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24430.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25180.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25182.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24118.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25183.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24341.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24486.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25181.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25182.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25184.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24228.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.25185.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24118.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:50:51.087,ns_1@10.242.238.90:<0.24177.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.24366.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25168.0> (ok) [rebalance:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.24203.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.24239.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.25184.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.25185.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.24486.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.24118.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.24177.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.24203.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.25186.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.24239.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.25187.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.25188.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.25186.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.25187.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.24177.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.25188.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.24203.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.24239.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
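Note on the repeated shutdown sequence in the entries above: each retiring replication stream logs "Dying with reason: shutdown", then waits for an opaque-message ack while a short-lived helper process sends a confirmation marker downstream, and finally reports "Got close ack!" once the downstream side has consumed everything sent before the marker. The sketch below only mirrors that marker/ack shape; the module and function names are invented for illustration and this is not the ebucketmigrator_srv source.

    %% Illustrative shape only; names are invented. A terminating sender pushes
    %% one last marker downstream and waits until it is acknowledged, which
    %% proves everything sent before the marker has been consumed.
    -module(close_ack_sketch).
    -export([graceful_close/1, downstream_loop/0]).

    graceful_close(Downstream) ->
        Marker = make_ref(),
        Downstream ! {opaque, self(), Marker},      %% "sending opaque message"
        receive
            {opaque_ack, Marker} -> ok              %% "got close ack"
        after 30000 ->
            {error, no_close_ack}
        end.

    downstream_loop() ->
        receive
            {opaque, From, Marker} ->
                From ! {opaque_ack, Marker},
                downstream_loop();
            stop ->
                ok
        end.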
[rebalance:debug,2014-08-19T16:50:51.088,ns_1@10.242.238.90:<0.24152.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.089,ns_1@10.242.238.90:<0.24152.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.089,ns_1@10.242.238.90:<0.25189.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.089,ns_1@10.242.238.90:<0.25189.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.089,ns_1@10.242.238.90:<0.24152.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:51.255,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 663 state to active [ns_server:info,2014-08-19T16:50:51.261,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 669 state to active [ns_server:info,2014-08-19T16:50:51.263,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 667 state to active [ns_server:info,2014-08-19T16:50:51.264,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 665 state to active [ns_server:info,2014-08-19T16:50:51.275,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 662 state to active [ns_server:info,2014-08-19T16:50:51.279,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 664 state to active [ns_server:debug,2014-08-19T16:50:51.280,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:51.282,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 671 state to active [ns_server:debug,2014-08-19T16:50:51.283,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.283,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3120 us [ns_server:debug,2014-08-19T16:50:51.283,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.284,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{936, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.294,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:51.299,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 675 state to active [ns_server:debug,2014-08-19T16:50:51.302,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8505 us 
[ns_server:debug,2014-08-19T16:50:51.302,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.303,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.304,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{663, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:51.306,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/663. Updated state: active (1) [ns_server:info,2014-08-19T16:50:51.311,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 674 state to active [ns_server:debug,2014-08-19T16:50:51.322,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",663,active,1} [ns_server:info,2014-08-19T16:50:51.324,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 676 state to active [ns_server:debug,2014-08-19T16:50:51.334,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:50:51.338,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 668 state to active [ns_server:debug,2014-08-19T16:50:51.339,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.339,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4926 us [ns_server:debug,2014-08-19T16:50:51.340,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{669, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.363,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.364,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1159 us [ns_server:debug,2014-08-19T16:50:51.365,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing 
replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.365,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.366,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{665, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:51.381,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/675. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:51.381,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",675,active,1} [rebalance:debug,2014-08-19T16:50:51.385,ns_1@10.242.238.90:<0.24543.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.386,ns_1@10.242.238.90:<0.24543.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.386,ns_1@10.242.238.90:<0.25199.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.386,ns_1@10.242.238.90:<0.25199.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.386,ns_1@10.242.238.90:<0.24543.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:51.387,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.391,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.391,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3804 us [ns_server:debug,2014-08-19T16:50:51.392,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.393,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{667, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:51.410,ns_1@10.242.238.90:<0.24436.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.410,ns_1@10.242.238.90:<0.24436.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.410,ns_1@10.242.238.90:<0.25201.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.410,ns_1@10.242.238.90:<0.25201.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.410,ns_1@10.242.238.90:<0.24436.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:50:51.426,ns_1@10.242.238.90:<0.24632.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.426,ns_1@10.242.238.90:<0.24632.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.426,ns_1@10.242.238.90:<0.25203.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.427,ns_1@10.242.238.90:<0.25203.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.427,ns_1@10.242.238.90:<0.24632.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:50:51.427,ns_1@10.242.238.90:<0.24411.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.427,ns_1@10.242.238.90:<0.24411.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.427,ns_1@10.242.238.90:<0.25204.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.427,ns_1@10.242.238.90:<0.25204.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.427,ns_1@10.242.238.90:<0.24411.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.432,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.433,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1422 us [ns_server:debug,2014-08-19T16:50:51.437,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.437,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.438,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{662, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:51.440,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/671. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:51.440,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",671,active,1} [rebalance:debug,2014-08-19T16:50:51.457,ns_1@10.242.238.90:<0.24518.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.457,ns_1@10.242.238.90:<0.24518.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.457,ns_1@10.242.238.90:<0.25207.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.457,ns_1@10.242.238.90:<0.25207.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.457,ns_1@10.242.238.90:<0.24518.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:51.460,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.463,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.463,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2777 us [ns_server:debug,2014-08-19T16:50:51.464,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.464,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{664, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:51.483,ns_1@10.242.238.90:<0.24158.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.483,ns_1@10.242.238.90:<0.24158.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.483,ns_1@10.242.238.90:<0.25208.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.483,ns_1@10.242.238.90:<0.25208.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.483,ns_1@10.242.238.90:<0.24158.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:51.485,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.491,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.491,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5752 us [ns_server:debug,2014-08-19T16:50:51.493,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{671, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.495,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.502,ns_1@10.242.238.90:<0.24284.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.503,ns_1@10.242.238.90:<0.24284.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.503,ns_1@10.242.238.90:<0.25210.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.503,ns_1@10.242.238.90:<0.25210.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [views:debug,2014-08-19T16:50:51.503,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/669. Updated state: active (1) [rebalance:info,2014-08-19T16:50:51.503,ns_1@10.242.238.90:<0.24284.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.503,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",669,active,1} [rebalance:debug,2014-08-19T16:50:51.515,ns_1@10.242.238.90:<0.24183.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.516,ns_1@10.242.238.90:<0.24183.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.516,ns_1@10.242.238.90:<0.25212.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.516,ns_1@10.242.238.90:<0.25212.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.516,ns_1@10.242.238.90:<0.24183.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:51.519,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.522,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.522,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3019 us [ns_server:debug,2014-08-19T16:50:51.523,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.524,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{675, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:51.540,ns_1@10.242.238.90:<0.24322.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.541,ns_1@10.242.238.90:<0.24322.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.541,ns_1@10.242.238.90:<0.25213.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:50:51.541,ns_1@10.242.238.90:<0.24322.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.541,ns_1@10.242.238.90:<0.25213.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.544,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.546,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2249 us [ns_server:debug,2014-08-19T16:50:51.547,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.548,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{674, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.548,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:51.549,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/667. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:51.549,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",667,active,1} [rebalance:debug,2014-08-19T16:50:51.580,ns_1@10.242.238.90:<0.24138.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.581,ns_1@10.242.238.90:<0.24138.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.581,ns_1@10.242.238.90:<0.25216.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.581,ns_1@10.242.238.90:<0.25216.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.581,ns_1@10.242.238.90:<0.24138.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:50:51.582,ns_1@10.242.238.90:<0.24568.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.582,ns_1@10.242.238.90:<0.24568.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.582,ns_1@10.242.238.90:<0.25217.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.582,ns_1@10.242.238.90:<0.25217.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.582,ns_1@10.242.238.90:<0.24568.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.585,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:51.590,ns_1@10.242.238.90:<0.24372.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.590,ns_1@10.242.238.90:<0.24372.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.591,ns_1@10.242.238.90:<0.25218.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.591,ns_1@10.242.238.90:<0.25218.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.591,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:51.591,ns_1@10.242.238.90:<0.24372.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:51.592,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6814 us [ns_server:debug,2014-08-19T16:50:51.592,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{676, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.592,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:51.598,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/665. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:51.598,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",665,active,1} [rebalance:debug,2014-08-19T16:50:51.610,ns_1@10.242.238.90:<0.24234.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.611,ns_1@10.242.238.90:<0.24234.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.611,ns_1@10.242.238.90:<0.25220.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.611,ns_1@10.242.238.90:<0.25220.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.611,ns_1@10.242.238.90:<0.24234.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:50:51.612,ns_1@10.242.238.90:<0.24209.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.612,ns_1@10.242.238.90:<0.24209.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.612,ns_1@10.242.238.90:<0.25221.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.613,ns_1@10.242.238.90:<0.25221.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:50:51.613,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:info,2014-08-19T16:50:51.613,ns_1@10.242.238.90:<0.24209.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:51.616,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2775 us [ns_server:debug,2014-08-19T16:50:51.616,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.616,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.617,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{668, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.644,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.647,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.647,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2889 us [ns_server:debug,2014-08-19T16:50:51.647,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.648,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{926, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:51.654,ns_1@10.242.238.90:<0.24461.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.654,ns_1@10.242.238.90:<0.24461.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.654,ns_1@10.242.238.90:<0.25223.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.654,ns_1@10.242.238.90:<0.25223.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.654,ns_1@10.242.238.90:<0.24461.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:50:51.657,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/676. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:51.657,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",676,active,1} [rebalance:debug,2014-08-19T16:50:51.672,ns_1@10.242.238.90:<0.24593.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.673,ns_1@10.242.238.90:<0.24593.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.673,ns_1@10.242.238.90:<0.25224.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.673,ns_1@10.242.238.90:<0.25224.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.673,ns_1@10.242.238.90:<0.24593.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.675,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.683,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.683,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5964 us [ns_server:debug,2014-08-19T16:50:51.683,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.684,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{933, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:51.686,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 410 state to replica [ns_server:info,2014-08-19T16:50:51.686,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [410,426] ([410], []) [ns_server:debug,2014-08-19T16:50:51.687,ns_1@10.242.238.90:<0.25226.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,[410,426],'ns_1@10.242.238.89'}, #Ref<0.0.1.19866>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[410,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:51.687,ns_1@10.242.238.90:<0.25226.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25065.0> [ns_server:info,2014-08-19T16:50:51.688,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` 
[ns_server:info,2014-08-19T16:50:51.699,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{410,1},{426,1}] [ns_server:info,2014-08-19T16:50:51.699,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:51.700,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:51.700,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:51.700,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:51.700,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:51.700,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.700,ns_1@10.242.238.90:<0.25228.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.700,ns_1@10.242.238.90:<0.25228.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.700,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.700,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:51.700,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:51.701,ns_1@10.242.238.90:<0.25065.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:51.701,ns_1@10.242.238.90:<0.25226.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25065.0> [ns_server:debug,2014-08-19T16:50:51.701,ns_1@10.242.238.90:<0.25226.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:51.701,ns_1@10.242.238.90:<0.25230.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:51.701,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25065.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25067.0>,<<"cut off">>,<<"cut off">>,[],7,false,false,0, {1408,452651,700064}, completed, {<0.25226.0>,#Ref<0.0.1.19879>}, <<"replication_ns_1@10.242.238.90">>,<0.25065.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[error_logger:info,2014-08-19T16:50:51.702,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25230.0>}, {name,{new_child_id,[410,426],'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[410,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:51.702,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25226.0>,{#Ref<0.0.1.19868>,<0.25230.0>}} [ns_server:info,2014-08-19T16:50:51.702,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 680 state to active [ns_server:debug,2014-08-19T16:50:51.708,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:51.708,ns_1@10.242.238.90:<0.24493.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.709,ns_1@10.242.238.90:<0.24493.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.709,ns_1@10.242.238.90:<0.25231.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.709,ns_1@10.242.238.90:<0.25231.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.709,ns_1@10.242.238.90:<0.24493.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
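Taken together, the preceding entries trace one complete "new-style" vbucket filter change on stream replication_ns_1@10.242.238.90: a temporary process registers under the new child id, links itself to the old ebucketmigrator, the tap filter is widened (here to [410,426]), the upstream sender is silenced, downstream reception is confirmed, and the old process hands its state to the replacement that 'ns_vbm_new_sup-default' then reports as started. A minimal sketch of pulling those handoffs out of a log like this is below; it assumes one log entry per string, and the helper name and regexes are illustrative, keyed only to phrases that occur here.

```python
# Minimal sketch (illustrative, not Couchbase code): summarize completed
# vbucket filter changes from ebucketmigrator/ns_vbm_new_sup log entries.
import re
from typing import Dict, List

LINK_RE = re.compile(r"Linked myself to old ebucketmigrator (<[\d.]+>)")
FILTER_RE = re.compile(r"Changing vbucket filter on tap stream `([^`]+)`: \[([^\]]*)\]")
STATE_RE = re.compile(r"Got old ebucketmigrator state from (<[\d.]+>)")

def summarize_filter_changes(entries: List[str]) -> List[Dict]:
    """Collect one record per completed vbucket filter change, in log order."""
    changes, current = [], None
    for entry in entries:
        if m := LINK_RE.search(entry):
            current = {"old_pid": m.group(1), "stream": None, "vbuckets": []}
        elif current and (m := FILTER_RE.search(entry)):
            current["stream"] = m.group(1)
            # filter items look like {410,1},{426,1}; keep just the vbucket ids
            current["vbuckets"] = [int(vb) for vb, _ in re.findall(r"\{(\d+),(\d+)\}", m.group(2))]
        elif current and STATE_RE.search(entry):
            changes.append(current)   # old state handed over: change is complete
            current = None
    return changes

if __name__ == "__main__":
    sample = [
        "Linked myself to old ebucketmigrator <0.25065.0>",
        "Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{410,1},{426,1}]",
        "Got old ebucketmigrator state from <0.25065.0>: {state,...}",
    ]
    print(summarize_filter_changes(sample))
    # -> [{'old_pid': '<0.25065.0>', 'stream': 'replication_ns_1@10.242.238.90', 'vbuckets': [410, 426]}]
```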
[ns_server:debug,2014-08-19T16:50:51.711,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[410,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:51.712,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25233.0> [ns_server:debug,2014-08-19T16:50:51.713,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.713,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4801 us [ns_server:debug,2014-08-19T16:50:51.714,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.714,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{410, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:51.716,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/674. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:51.716,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",674,active,1} [ns_server:info,2014-08-19T16:50:51.733,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 414 state to replica [ns_server:info,2014-08-19T16:50:51.733,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [410,414,426] ([414], []) [ns_server:debug,2014-08-19T16:50:51.734,ns_1@10.242.238.90:<0.25234.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,[410,414,426],'ns_1@10.242.238.89'}, #Ref<0.0.1.20063>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[410,414,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:51.734,ns_1@10.242.238.90:<0.25234.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25230.0> [ns_server:info,2014-08-19T16:50:51.734,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:50:51.738,ns_1@10.242.238.90:<0.24657.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.739,ns_1@10.242.238.90:<0.24657.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.739,ns_1@10.242.238.90:<0.25236.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm 
downstream reception [ns_server:debug,2014-08-19T16:50:51.739,ns_1@10.242.238.90:<0.25236.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.739,ns_1@10.242.238.90:<0.24657.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:50:51.741,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{410,1},{414,1},{426,1}] [ns_server:info,2014-08-19T16:50:51.742,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:51.742,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:51.742,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:51.742,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:51.742,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:51.742,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.742,ns_1@10.242.238.90:<0.25237.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.743,ns_1@10.242.238.90:<0.25237.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.743,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.743,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:51.743,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:51.743,ns_1@10.242.238.90:<0.25230.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:51.743,ns_1@10.242.238.90:<0.25234.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25230.0> [ns_server:debug,2014-08-19T16:50:51.744,ns_1@10.242.238.90:<0.25234.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:51.744,ns_1@10.242.238.90:<0.25239.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:51.744,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25230.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25233.0>,<<"cut off">>,<<"cut off">>,[],10,false,false,0, {1408,452651,742400}, completed, {<0.25234.0>,#Ref<0.0.1.20076>}, <<"replication_ns_1@10.242.238.90">>,<0.25230.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:51.744,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25234.0>,{#Ref<0.0.1.20065>,<0.25239.0>}} [error_logger:info,2014-08-19T16:50:51.744,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25239.0>}, {name, {new_child_id,[410,414,426],'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[410,414,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:50:51.745,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 679 state to active [ns_server:debug,2014-08-19T16:50:51.750,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.753,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[410,414,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:51.753,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25241.0> [ns_server:debug,2014-08-19T16:50:51.755,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5240 us [ns_server:debug,2014-08-19T16:50:51.756,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.756,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.757,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{414, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:51.758,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 407 state to replica [ns_server:info,2014-08-19T16:50:51.759,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,410,414,426] ([407], []) [ns_server:debug,2014-08-19T16:50:51.759,ns_1@10.242.238.90:<0.25242.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,410,414,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.20224>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,410,414,426]}, {set_to_pending_state,false}, {takeover,false}, 
{suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:51.760,ns_1@10.242.238.90:<0.25242.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25239.0> [ns_server:info,2014-08-19T16:50:51.760,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:51.768,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1},{410,1},{414,1},{426,1}] [ns_server:info,2014-08-19T16:50:51.768,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:51.769,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:51.769,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:51.769,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:51.769,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:51.769,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.769,ns_1@10.242.238.90:<0.25244.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.769,ns_1@10.242.238.90:<0.25244.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.769,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.769,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:51.770,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:51.770,ns_1@10.242.238.90:<0.25239.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:51.770,ns_1@10.242.238.90:<0.25242.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25239.0> [ns_server:debug,2014-08-19T16:50:51.770,ns_1@10.242.238.90:<0.25242.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:51.770,ns_1@10.242.238.90:<0.25246.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:51.770,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25239.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25241.0>,<<"cut off">>,<<"cut off">>,[],13,false,false,0, {1408,452651,769044}, completed, {<0.25242.0>,#Ref<0.0.1.20237>}, <<"replication_ns_1@10.242.238.90">>,<0.25239.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:51.770,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25242.0>,{#Ref<0.0.1.20226>,<0.25246.0>}} [error_logger:info,2014-08-19T16:50:51.770,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25246.0>}, {name, {new_child_id, [407,410,414,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,410,414,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [views:debug,2014-08-19T16:50:51.774,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/668. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:51.775,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",668,active,1} [ns_server:debug,2014-08-19T16:50:51.777,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.780,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.780,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2945 us [ns_server:debug,2014-08-19T16:50:51.780,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.780,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,410,414,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:51.781,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25248.0> [ns_server:debug,2014-08-19T16:50:51.781,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{407, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:51.782,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 415 state to replica 
[ns_server:info,2014-08-19T16:50:51.783,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,410,414,415,426] ([415], []) [ns_server:debug,2014-08-19T16:50:51.784,ns_1@10.242.238.90:<0.25249.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,410,414,415,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.20391>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,410,414,415,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:51.784,ns_1@10.242.238.90:<0.25249.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25246.0> [ns_server:info,2014-08-19T16:50:51.785,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:51.791,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1},{410,1},{414,1},{415,1},{426,1}] [ns_server:info,2014-08-19T16:50:51.792,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:51.792,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:51.792,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:51.792,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:51.792,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:51.792,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.792,ns_1@10.242.238.90:<0.25251.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.793,ns_1@10.242.238.90:<0.25251.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.793,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.793,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:51.793,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:51.793,ns_1@10.242.238.90:<0.25246.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:51.793,ns_1@10.242.238.90:<0.25249.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25246.0> [ns_server:debug,2014-08-19T16:50:51.794,ns_1@10.242.238.90:<0.25249.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:51.794,ns_1@10.242.238.90:<0.25253.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:51.794,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25246.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25248.0>,<<"cut off">>,<<"cut off">>,[],16,false,false,0, {1408,452651,792466}, completed, {<0.25249.0>,#Ref<0.0.1.20404>}, <<"replication_ns_1@10.242.238.90">>,<0.25246.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:51.794,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25249.0>,{#Ref<0.0.1.20393>,<0.25253.0>}} [error_logger:info,2014-08-19T16:50:51.794,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25253.0>}, {name, {new_child_id, [407,410,414,415,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,410,414,415,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:51.801,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.802,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,410,414,415,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:51.802,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25254.0> [ns_server:debug,2014-08-19T16:50:51.804,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2902 us [ns_server:debug,2014-08-19T16:50:51.804,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.804,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.805,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{415, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
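Each "config change: buckets" entry above carries a single-vbucket map delta of the form {VBucket, OldChain, NewChain}; for example, vbucket 415 moving from ['ns_1@10.242.238.88',undefined] to ['ns_1@10.242.238.89','ns_1@10.242.238.90']. A hedged sketch of extracting those deltas (again illustrative, not part of ns_server; the regex is an assumption based only on the text above) could look like this:

```python
# Minimal sketch (illustrative): pull per-vbucket map deltas out of
# "config change: buckets" log entries.
import re
from typing import Iterable, List, Tuple

MAP_DELTA_RE = re.compile(r"\{(\d+),\s*\[([^\]]*)\],\s*\[([^\]]*)\]\}")

def parse_chain(chain: str) -> List[str]:
    """Split "'ns_1@10.242.238.88',undefined" into its node atoms."""
    return [item.strip().strip("'") for item in chain.split(",") if item.strip()]

def map_deltas(entries: Iterable[str]) -> List[Tuple[int, List[str], List[str]]]:
    deltas = []
    for entry in entries:
        if "config change: buckets" not in entry:
            continue
        for vb, old, new in MAP_DELTA_RE.findall(entry):
            deltas.append((int(vb), parse_chain(old), parse_chain(new)))
    return deltas

if __name__ == "__main__":
    sample = ("config change: buckets -> [{configs,[{\"default\", "
              "[{map,[{414, ['ns_1@10.242.238.88',undefined], "
              "['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, ...]}]}]")
    print(map_deltas([sample]))
    # -> [(414, ['ns_1@10.242.238.88', 'undefined'],
    #      ['ns_1@10.242.238.89', 'ns_1@10.242.238.90'])]
```

Tracking these deltas over time is one way to see which replication chains the rebalance has already rewritten for this bucket.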
[ns_server:info,2014-08-19T16:50:51.818,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 681 state to active [views:debug,2014-08-19T16:50:51.833,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/664. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:51.833,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",664,active,1} [rebalance:debug,2014-08-19T16:50:51.836,ns_1@10.242.238.90:<0.24259.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.836,ns_1@10.242.238.90:<0.24259.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.836,ns_1@10.242.238.90:<0.25256.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.836,ns_1@10.242.238.90:<0.25256.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.837,ns_1@10.242.238.90:<0.24259.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.838,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.840,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.840,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2758 us [ns_server:debug,2014-08-19T16:50:51.841,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.842,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{930, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:51.844,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 411 state to replica [ns_server:info,2014-08-19T16:50:51.844,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,410,411,414,415,426] ([411], []) [ns_server:debug,2014-08-19T16:50:51.845,ns_1@10.242.238.90:<0.25257.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,410,411,414,415,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.20605>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,410,411,414,415,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] 
[ns_server:debug,2014-08-19T16:50:51.845,ns_1@10.242.238.90:<0.25257.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25253.0> [ns_server:info,2014-08-19T16:50:51.845,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:51.853,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1},{410,1},{411,1},{414,1},{415,1},{426,1}] [ns_server:info,2014-08-19T16:50:51.853,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:51.854,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:51.854,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:51.854,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:51.854,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:51.854,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.854,ns_1@10.242.238.90:<0.25260.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.854,ns_1@10.242.238.90:<0.25260.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.854,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.855,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:51.855,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:51.855,ns_1@10.242.238.90:<0.25253.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:51.855,ns_1@10.242.238.90:<0.25257.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25253.0> [ns_server:debug,2014-08-19T16:50:51.855,ns_1@10.242.238.90:<0.25257.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:51.856,ns_1@10.242.238.90:<0.25262.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:51.856,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25253.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25254.0>,<<"cut off">>,<<"cut off">>,[],19,false,false,0, {1408,452651,854145}, completed, {<0.25257.0>,#Ref<0.0.1.20619>}, <<"replication_ns_1@10.242.238.90">>,<0.25253.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:51.856,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25257.0>,{#Ref<0.0.1.20607>,<0.25262.0>}} [error_logger:info,2014-08-19T16:50:51.856,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25262.0>}, {name, {new_child_id, [407,410,411,414,415,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,410,411,414,415,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:50:51.856,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 673 state to active [ns_server:debug,2014-08-19T16:50:51.860,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.864,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,410,411,414,415,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:51.864,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25263.0> [ns_server:debug,2014-08-19T16:50:51.868,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7751 us [ns_server:debug,2014-08-19T16:50:51.868,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.869,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.869,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{411, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:51.891,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/662. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:51.891,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",662,active,1} [ns_server:info,2014-08-19T16:50:51.904,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 677 state to active [rebalance:debug,2014-08-19T16:50:51.906,ns_1@10.242.238.90:<0.24347.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.906,ns_1@10.242.238.90:<0.24347.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.906,ns_1@10.242.238.90:<0.25265.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.906,ns_1@10.242.238.90:<0.25265.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.907,ns_1@10.242.238.90:<0.24347.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.912,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.915,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.916,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.916,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3590 us [ns_server:debug,2014-08-19T16:50:51.917,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{922, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:51.918,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 424 state to replica [ns_server:info,2014-08-19T16:50:51.918,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,410,411,414,415,424,426] ([424], []) [ns_server:debug,2014-08-19T16:50:51.921,ns_1@10.242.238.90:<0.25267.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,410,411,414,415,424,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.20836>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,410,411,414,415,424,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:51.921,ns_1@10.242.238.90:<0.25267.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25262.0> 
[ns_server:info,2014-08-19T16:50:51.921,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:51.932,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1},{410,1},{411,1},{414,1},{415,1},{424,1},{426,1}] [ns_server:info,2014-08-19T16:50:51.932,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:51.933,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:51.933,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:51.933,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:51.933,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:51.933,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.933,ns_1@10.242.238.90:<0.25269.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.933,ns_1@10.242.238.90:<0.25269.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.933,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.934,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:51.934,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:51.934,ns_1@10.242.238.90:<0.25262.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:51.934,ns_1@10.242.238.90:<0.25267.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25262.0> [ns_server:debug,2014-08-19T16:50:51.934,ns_1@10.242.238.90:<0.25267.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:51.934,ns_1@10.242.238.90:<0.25271.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:51.935,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25262.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25263.0>,<<"cut off">>,<<"cut off">>,[],22,false,false,0, {1408,452651,933041}, completed, {<0.25267.0>,#Ref<0.0.1.20849>}, <<"replication_ns_1@10.242.238.90">>,<0.25262.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:51.935,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25267.0>,{#Ref<0.0.1.20838>,<0.25271.0>}} [error_logger:info,2014-08-19T16:50:51.935,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25271.0>}, {name, {new_child_id, [407,410,411,414,415,424,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,410,411,414,415,424,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:51.940,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.942,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,410,411,414,415,424,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:51.942,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25272.0> [ns_server:debug,2014-08-19T16:50:51.942,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2716 us [ns_server:debug,2014-08-19T16:50:51.943,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.943,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.944,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{424, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:51.944,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/680. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:51.944,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",680,active,1} [ns_server:info,2014-08-19T16:50:51.946,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 419 state to replica [ns_server:info,2014-08-19T16:50:51.947,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,410,411,414,415,419,424,426] ([419], []) [ns_server:debug,2014-08-19T16:50:51.947,ns_1@10.242.238.90:<0.25274.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,410,411,414,415,419,424,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.20995>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,410,411,414,415,419,424,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:51.948,ns_1@10.242.238.90:<0.25274.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25271.0> [ns_server:info,2014-08-19T16:50:51.948,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:51.954,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1},{410,1},{411,1},{414,1},{415,1},{419,1},{424,1},{426,1}] [ns_server:info,2014-08-19T16:50:51.955,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:51.955,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:51.955,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:51.955,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:51.955,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:51.956,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.956,ns_1@10.242.238.90:<0.25276.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.956,ns_1@10.242.238.90:<0.25276.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.956,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:51.956,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:51.956,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:51.956,ns_1@10.242.238.90:<0.25271.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:51.956,ns_1@10.242.238.90:<0.25274.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25271.0> [ns_server:debug,2014-08-19T16:50:51.957,ns_1@10.242.238.90:<0.25274.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:51.957,ns_1@10.242.238.90:<0.25278.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:51.957,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25271.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25272.0>,<<"cut off">>,<<"cut off">>,[],25,false,false,0, {1408,452651,955527}, completed, {<0.25274.0>,#Ref<0.0.1.21008>}, <<"replication_ns_1@10.242.238.90">>,<0.25271.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:51.957,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25274.0>,{#Ref<0.0.1.20997>,<0.25278.0>}} [error_logger:info,2014-08-19T16:50:51.957,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25278.0>}, {name, {new_child_id, [407,410,411,414,415,419,424,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,410,411,414,415,419,424,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:50:51.957,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 670 state to active [ns_server:debug,2014-08-19T16:50:51.964,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:51.964,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,410,411,414,415,419,424,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:51.965,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25279.0> [ns_server:debug,2014-08-19T16:50:51.968,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.968,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3509 us [ns_server:debug,2014-08-19T16:50:51.969,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:50:51.969,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{419, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:51.972,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 423 state to replica [ns_server:info,2014-08-19T16:50:51.972,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,410,411,414,415,419,423,424,426] ([423], []) [ns_server:debug,2014-08-19T16:50:51.978,ns_1@10.242.238.90:<0.25280.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,410,411,414,415,419,423,424,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.21147>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,410,411,414,415,419,423,424,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:51.978,ns_1@10.242.238.90:<0.25280.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25278.0> [ns_server:info,2014-08-19T16:50:51.978,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:51.990,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1},{410,1},{411,1},{414,1},{415,1},{419,1},{423,1},{424,1},{426,1}] [ns_server:info,2014-08-19T16:50:51.991,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:51.991,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:51.991,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
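Each "config change: buckets ->" entry above publishes a single updated map row of the form {VBucket, OldChain, NewChain}, e.g. vbucket 419 moving from ['ns_1@10.242.238.88', undefined] to ['ns_1@10.242.238.89', 'ns_1@10.242.238.90']. A sketch that tabulates those rows from the log text (again Python; the regex and function names are illustrative assumptions, not ns_server code):

import re

# One map row per "config change: buckets ->" entry, e.g.
#   {map,[{419, ['ns_1@10.242.238.88',undefined],
#               ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}
MAP_ROW_RE = re.compile(
    r"\{map,\[\{(\d+),\s*\[([^\]]*)\],\s*\[([^\]]*)\]\}\]\}")

def chain(raw):
    """Split \"'ns_1@a',undefined\" into a plain list of node names."""
    return [part.strip().strip("'") for part in raw.split(",")]

def map_updates(log_text):
    for vbucket, old, new in MAP_ROW_RE.findall(log_text):
        yield int(vbucket), chain(old), chain(new)

if __name__ == "__main__":
    with open("ns_server.debug.log") as f:          # illustrative path
        for vb, old, new in map_updates(f.read()):
            print("vbucket %4d: %s -> %s" % (vb, old, new))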
[ns_server:debug,2014-08-19T16:50:51.992,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:51.992,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:51.992,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:51.992,ns_1@10.242.238.90:<0.25283.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:51.992,ns_1@10.242.238.90:<0.25283.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:51.992,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:51.992,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:51.992,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:51.992,ns_1@10.242.238.90:<0.25278.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:51.992,ns_1@10.242.238.90:<0.25280.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25278.0> [ns_server:debug,2014-08-19T16:50:51.993,ns_1@10.242.238.90:<0.25280.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:51.993,ns_1@10.242.238.90:<0.25285.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:51.993,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25278.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25279.0>,<<"cut off">>,<<"cut off">>,[],28,false,false,0, {1408,452651,991808}, completed, {<0.25280.0>,#Ref<0.0.1.21162>}, <<"replication_ns_1@10.242.238.90">>,<0.25278.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:51.993,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25280.0>,{#Ref<0.0.1.21149>,<0.25285.0>}} [ns_server:info,2014-08-19T16:50:51.994,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 672 state to active [error_logger:info,2014-08-19T16:50:51.993,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25285.0>}, {name, {new_child_id, [407,410,411,414,415,419,423,424,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [407,410,411,414,415,419,423,424,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:50:51.995,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 666 state to active [ns_server:debug,2014-08-19T16:50:51.999,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.002,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2492 us [ns_server:debug,2014-08-19T16:50:52.002,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.003,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.003,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{423, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.005,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,410,411,414,415,419,423,424,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:52.005,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25286.0> [views:debug,2014-08-19T16:50:52.019,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/681. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:52.019,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",681,active,1} [ns_server:info,2014-08-19T16:50:52.031,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 678 state to active [ns_server:debug,2014-08-19T16:50:52.038,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.042,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.042,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3915 us [ns_server:debug,2014-08-19T16:50:52.043,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.043,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{924, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:52.045,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 418 state to replica [ns_server:info,2014-08-19T16:50:52.046,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,410,411,414,415,418,419,423,424,426] ([418], []) [ns_server:debug,2014-08-19T16:50:52.046,ns_1@10.242.238.90:<0.25288.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,410,411,414,415,418,419,423,424,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.21374>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,410,411,414,415,418,419,423,424,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:52.047,ns_1@10.242.238.90:<0.25288.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25285.0> [ns_server:info,2014-08-19T16:50:52.047,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:52.058,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1}, {410,1}, {411,1}, {414,1}, {415,1}, {418,1}, {419,1}, {423,1}, {424,1}, {426,1}] [ns_server:info,2014-08-19T16:50:52.058,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
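The mc_couch_events / mc_connection pairs above ("Got set_vbucket event for default/NNN" followed by "Signaled mc_couch_event: {set_vbucket,...}") record which vbuckets this node has just flipped to active. A small sketch, under the same illustrative assumptions as before, that lists them:

import re

SET_VB_RE = re.compile(
    r'Signaled mc_couch_event: \{set_vbucket,"([^"]+)",(\d+),(\w+),')

def set_vbucket_events(log_text):
    for bucket, vb, state in SET_VB_RE.findall(log_text):
        yield bucket, int(vb), state

if __name__ == "__main__":
    with open("ns_server.debug.log") as f:          # illustrative path
        active = sorted(vb for _bucket, vb, state in
                        set_vbucket_events(f.read()) if state == "active")
        print("vbuckets signalled active:", active)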
[ns_server:info,2014-08-19T16:50:52.059,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:52.059,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:52.059,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:52.059,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:52.059,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:52.059,ns_1@10.242.238.90:<0.25291.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:52.059,ns_1@10.242.238.90:<0.25291.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:52.059,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:52.060,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:52.060,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:52.060,ns_1@10.242.238.90:<0.25285.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:52.060,ns_1@10.242.238.90:<0.25288.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25285.0> [ns_server:debug,2014-08-19T16:50:52.060,ns_1@10.242.238.90:<0.25288.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:52.060,ns_1@10.242.238.90:<0.25293.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:52.060,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25285.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25286.0>,<<"cut off">>,<<"cut off">>,[],31,false,false,0, {1408,452652,59146}, completed, {<0.25288.0>,#Ref<0.0.1.21388>}, <<"replication_ns_1@10.242.238.90">>,<0.25285.0>, {had_backfill,false,undefined,[]}, completed,false}. 
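Every filter change above walks the same new-style handshake: start the change on the tap stream, swap the filter, silence the upstream sender, confirm the downstream with an opaque message, hand the old state to the replacement ebucketmigrator, and start a fresh upstream_sender. Counting those verbatim milestone strings is a quick sanity check that no change stalled part-way; a sketch (hypothetical helper, same illustrative log path):

# Milestones of one new-style vbucket filter change, copied verbatim from the
# ebucketmigrator_srv / ns_vbm_new_sup entries in this log.
MILESTONES = [
    "Starting new-style vbucket filter change",
    "Successfully changed vbucket filter",
    "Silencing upstream sender",
    "Got close ack!",
    "Got old ebucketmigrator state from",
    "Reusing old upstream",
    "upstream_sender pid:",
]

def milestone_counts(log_text):
    """Return {milestone: occurrences}; equal counts mean every change that
    started also handed its state to a new ebucketmigrator."""
    return {m: log_text.count(m) for m in MILESTONES}

if __name__ == "__main__":
    with open("ns_server.debug.log") as f:          # illustrative path
        counts = milestone_counts(f.read())
    for milestone, n in counts.items():
        print("%-45s %d" % (milestone, n))
    if len(set(counts.values())) != 1:
        print("warning: counts differ - a filter change may not have completed")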
[ns_server:debug,2014-08-19T16:50:52.061,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25288.0>,{#Ref<0.0.1.21376>,<0.25293.0>}} [error_logger:info,2014-08-19T16:50:52.061,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25293.0>}, {name, {new_child_id, [407,410,411,414,415,418,419,423,424,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [407,410,411,414,415,418,419,423,424, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:52.065,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.068,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,410,411,414,415,418,419,423,424,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:52.068,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25294.0> [views:debug,2014-08-19T16:50:52.072,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/679. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:52.073,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",679,active,1} [ns_server:debug,2014-08-19T16:50:52.073,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7299 us [ns_server:debug,2014-08-19T16:50:52.073,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.073,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.074,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{418, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:52.075,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 425 state to replica [ns_server:info,2014-08-19T16:50:52.075,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,410,411,414,415,418,419,423,424,425,426] ([425], []) [ns_server:debug,2014-08-19T16:50:52.076,ns_1@10.242.238.90:<0.25295.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under 
id:{"default", {new_child_id, [407,410,411,414,415,418,419,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.21536>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,410,411,414,415,418,419,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:52.076,ns_1@10.242.238.90:<0.25295.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25293.0> [ns_server:info,2014-08-19T16:50:52.076,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:52.083,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1}, {410,1}, {411,1}, {414,1}, {415,1}, {418,1}, {419,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:50:52.084,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:52.084,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:52.084,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:52.084,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:52.084,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:52.085,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:52.085,ns_1@10.242.238.90:<0.25298.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:52.085,ns_1@10.242.238.90:<0.25298.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:52.085,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:52.085,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:52.085,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:52.085,ns_1@10.242.238.90:<0.25293.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:52.085,ns_1@10.242.238.90:<0.25295.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25293.0> [ns_server:debug,2014-08-19T16:50:52.086,ns_1@10.242.238.90:<0.25295.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:52.086,ns_1@10.242.238.90:<0.25300.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:52.086,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25293.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25294.0>,<<"cut off">>,<<"cut off">>,[],34,false,false,0, {1408,452652,84534}, completed, {<0.25295.0>,#Ref<0.0.1.21549>}, <<"replication_ns_1@10.242.238.90">>,<0.25293.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:52.086,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25295.0>,{#Ref<0.0.1.21538>,<0.25300.0>}} [error_logger:info,2014-08-19T16:50:52.086,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25300.0>}, {name, {new_child_id, [407,410,411,414,415,418,419,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [407,410,411,414,415,418,419,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:52.091,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.095,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,410,411,414,415,418,419,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:debug,2014-08-19T16:50:52.098,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.098,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{425, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.114,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 10 us [rebalance:debug,2014-08-19T16:50:52.114,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25301.0> [ns_server:debug,2014-08-19T16:50:52.114,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:52.119,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 409 state to replica [ns_server:info,2014-08-19T16:50:52.120,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,409,410,411,414,415,418,419,423,424,425,426] ([409], []) 
[views:debug,2014-08-19T16:50:52.120,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/677. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:52.120,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",677,active,1} [ns_server:debug,2014-08-19T16:50:52.120,ns_1@10.242.238.90:<0.25303.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,409,410,411,414,415,418,419,423,424,425, 426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.21718>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,409,410,411,414,415,418,419,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:52.121,ns_1@10.242.238.90:<0.25303.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25300.0> [ns_server:info,2014-08-19T16:50:52.121,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:52.127,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1}, {409,1}, {410,1}, {411,1}, {414,1}, {415,1}, {418,1}, {419,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:50:52.128,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:52.128,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:52.128,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:52.128,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:52.128,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:52.129,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:52.129,ns_1@10.242.238.90:<0.25305.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:52.129,ns_1@10.242.238.90:<0.25305.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:52.129,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:52.129,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:52.129,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:52.129,ns_1@10.242.238.90:<0.25300.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:52.129,ns_1@10.242.238.90:<0.25303.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25300.0> [ns_server:debug,2014-08-19T16:50:52.130,ns_1@10.242.238.90:<0.25303.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:52.130,ns_1@10.242.238.90:<0.25307.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:52.130,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25300.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25301.0>,<<"cut off">>,<<"cut off">>,[],37,false,false,0, {1408,452652,128527}, completed, {<0.25303.0>,#Ref<0.0.1.21731>}, <<"replication_ns_1@10.242.238.90">>,<0.25300.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:52.130,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25303.0>,{#Ref<0.0.1.21720>,<0.25307.0>}} [error_logger:info,2014-08-19T16:50:52.130,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25307.0>}, {name, {new_child_id, [407,409,410,411,414,415,418,419,423,424,425, 426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [407,409,410,411,414,415,418,419,423,424, 425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:52.136,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.138,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,409,410,411,414,415,418,419,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:52.138,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25308.0> [ns_server:debug,2014-08-19T16:50:52.140,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3886 us [ns_server:debug,2014-08-19T16:50:52.141,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.141,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.142,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: 
buckets -> [{configs,[{"default", [{map,[{409, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:52.143,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 416 state to replica [ns_server:info,2014-08-19T16:50:52.143,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,409,410,411,414,415,416,418,419,423,424,425,426] ([416], []) [ns_server:debug,2014-08-19T16:50:52.146,ns_1@10.242.238.90:<0.25309.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,409,410,411,414,415,416,418,419,423,424, 425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.21850>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,409,410,411,414,415,416,418,419,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:52.146,ns_1@10.242.238.90:<0.25309.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25307.0> [ns_server:info,2014-08-19T16:50:52.146,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:52.153,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1}, {409,1}, {410,1}, {411,1}, {414,1}, {415,1}, {416,1}, {418,1}, {419,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:50:52.153,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:52.154,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:52.154,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:50:52.154,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:52.154,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:52.154,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:52.154,ns_1@10.242.238.90:<0.25312.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:52.154,ns_1@10.242.238.90:<0.25312.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:52.154,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:52.154,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:52.155,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:52.155,ns_1@10.242.238.90:<0.25307.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:52.155,ns_1@10.242.238.90:<0.25309.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25307.0> [ns_server:debug,2014-08-19T16:50:52.157,ns_1@10.242.238.90:<0.25309.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:52.158,ns_1@10.242.238.90:<0.25314.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:52.158,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25307.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25308.0>,<<"cut off">>,<<"cut off">>,[],40,false,false,0, {1408,452652,153996}, completed, {<0.25309.0>,#Ref<0.0.1.21863>}, <<"replication_ns_1@10.242.238.90">>,<0.25307.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:52.158,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25309.0>,{#Ref<0.0.1.21852>,<0.25314.0>}} [error_logger:info,2014-08-19T16:50:52.158,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25314.0>}, {name, {new_child_id, [407,409,410,411,414,415,416,418,419,423,424, 425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [407,409,410,411,414,415,416,418,419,423, 424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:52.163,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:50:52.173,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/673. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:52.173,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",673,active,1} [ns_server:debug,2014-08-19T16:50:52.173,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 9856 us [ns_server:debug,2014-08-19T16:50:52.174,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.174,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.174,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{416, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.175,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,409,410,411,414,415,416,418,419,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:52.175,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25315.0> [ns_server:info,2014-08-19T16:50:52.176,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 421 state to replica [ns_server:info,2014-08-19T16:50:52.176,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,409,410,411,414,415,416,418,419,421,423,424,425,426] ([421], []) 
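The ns_config_rep entries above time each full synchronization requested by 'ns_1@10.242.238.88' ("Fully synchronized config in N us", ranging here from 10 us to roughly 10 ms). A sketch that summarizes those timings, under the same illustrative assumptions:

import re

SYNC_RE = re.compile(r"Fully synchronized config in (\d+) us")

def sync_times_us(log_text):
    return [int(us) for us in SYNC_RE.findall(log_text)]

if __name__ == "__main__":
    with open("ns_server.debug.log") as f:          # illustrative path
        times = sync_times_us(f.read())
    if times:
        print("config syncs: %d, min %d us, max %d us, mean %.0f us"
              % (len(times), min(times), max(times), sum(times) / len(times)))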
[ns_server:debug,2014-08-19T16:50:52.177,ns_1@10.242.238.90:<0.25316.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,409,410,411,414,415,416,418,419,421,423, 424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.22017>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,409,410,411,414,415,416,418,419,421,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:52.177,ns_1@10.242.238.90:<0.25316.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25314.0> [ns_server:info,2014-08-19T16:50:52.177,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:52.184,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1}, {409,1}, {410,1}, {411,1}, {414,1}, {415,1}, {416,1}, {418,1}, {419,1}, {421,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:50:52.185,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:52.185,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:52.185,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:52.185,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:52.185,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:52.185,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:52.185,ns_1@10.242.238.90:<0.25319.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:52.186,ns_1@10.242.238.90:<0.25319.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:52.186,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:52.186,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:52.186,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:52.186,ns_1@10.242.238.90:<0.25314.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:52.186,ns_1@10.242.238.90:<0.25316.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25314.0> [ns_server:debug,2014-08-19T16:50:52.186,ns_1@10.242.238.90:<0.25316.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:52.187,ns_1@10.242.238.90:<0.25321.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:52.187,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25314.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25315.0>,<<"cut off">>,<<"cut off">>,[],43,false,false,0, {1408,452652,185375}, completed, {<0.25316.0>,#Ref<0.0.1.22030>}, <<"replication_ns_1@10.242.238.90">>,<0.25314.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:52.187,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25316.0>,{#Ref<0.0.1.22019>,<0.25321.0>}} [error_logger:info,2014-08-19T16:50:52.187,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25321.0>}, {name, {new_child_id, [407,409,410,411,414,415,416,418,419,421,423, 424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [407,409,410,411,414,415,416,418,419,421, 423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:52.191,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.194,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2893 us [ns_server:debug,2014-08-19T16:50:52.194,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.194,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,409,410,411,414,415,416,418,419,421,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:52.194,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25322.0> [ns_server:debug,2014-08-19T16:50:52.195,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.195,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{421, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, 
{servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:52.199,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 422 state to replica [ns_server:info,2014-08-19T16:50:52.200,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,409,410,411,414,415,416,418,419,421,422,423,424,425,426] ([422], []) [ns_server:debug,2014-08-19T16:50:52.203,ns_1@10.242.238.90:<0.25323.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,409,410,411,414,415,416,418,419,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.22153>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,409,410,411,414,415,416,418,419,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:52.203,ns_1@10.242.238.90:<0.25323.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25321.0> [ns_server:info,2014-08-19T16:50:52.203,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:52.210,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1}, {409,1}, {410,1}, {411,1}, {414,1}, {415,1}, {416,1}, {418,1}, {419,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:50:52.210,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:52.211,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:52.211,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:52.211,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:52.211,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:52.211,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:52.211,ns_1@10.242.238.90:<0.25325.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:52.211,ns_1@10.242.238.90:<0.25325.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:52.211,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:52.212,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:52.212,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:52.212,ns_1@10.242.238.90:<0.25321.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:52.212,ns_1@10.242.238.90:<0.25323.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25321.0> [ns_server:debug,2014-08-19T16:50:52.212,ns_1@10.242.238.90:<0.25323.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:52.212,ns_1@10.242.238.90:<0.25327.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:52.212,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25321.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25322.0>,<<"cut off">>,<<"cut off">>,[],46,false,false,0, {1408,452652,211226}, completed, {<0.25323.0>,#Ref<0.0.1.22167>}, <<"replication_ns_1@10.242.238.90">>,<0.25321.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:52.213,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25323.0>,{#Ref<0.0.1.22155>,<0.25327.0>}} [error_logger:info,2014-08-19T16:50:52.213,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25327.0>}, {name, {new_child_id, [407,409,410,411,414,415,416,418,419,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [407,409,410,411,414,415,416,418,419,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:52.217,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.220,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.220,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2782 us [views:debug,2014-08-19T16:50:52.220,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/678. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:52.221,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.221,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,409,410,411,414,415,416,418,419,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:52.221,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25329.0> [ns_server:debug,2014-08-19T16:50:52.221,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",678,active,1} [ns_server:debug,2014-08-19T16:50:52.221,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{422, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.251,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.259,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7812 us [ns_server:debug,2014-08-19T16:50:52.259,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.260,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.260,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{928, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:52.271,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/672. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:52.271,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",672,active,1} [ns_server:debug,2014-08-19T16:50:52.281,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.285,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3691 us [ns_server:debug,2014-08-19T16:50:52.286,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.286,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.287,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{934, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:52.288,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 413 state to replica [ns_server:info,2014-08-19T16:50:52.288,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,409,410,411,413,414,415,416,418,419,421,422,423,424,425,426] ([413], []) [ns_server:debug,2014-08-19T16:50:52.289,ns_1@10.242.238.90:<0.25332.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,409,410,411,413,414,415,416,418,419,421, 422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.22387>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,409,410,411,413,414,415,416,418,419,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:52.289,ns_1@10.242.238.90:<0.25332.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25327.0> [ns_server:info,2014-08-19T16:50:52.290,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:52.302,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1}, {409,1}, {410,1}, {411,1}, {413,1}, {414,1}, {415,1}, {416,1}, {418,1}, {419,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:50:52.302,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
[ns_server:info,2014-08-19T16:50:52.302,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:52.303,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:52.303,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:52.303,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:52.303,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:52.303,ns_1@10.242.238.90:<0.25334.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:52.303,ns_1@10.242.238.90:<0.25334.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:52.303,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:52.303,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:52.304,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:52.304,ns_1@10.242.238.90:<0.25327.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:52.304,ns_1@10.242.238.90:<0.25332.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25327.0> [ns_server:debug,2014-08-19T16:50:52.304,ns_1@10.242.238.90:<0.25332.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:52.304,ns_1@10.242.238.90:<0.25336.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:52.304,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25327.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25329.0>,<<"cut off">>,<<"cut off">>,[],49,false,false,0, {1408,452652,302937}, completed, {<0.25332.0>,#Ref<0.0.1.22400>}, <<"replication_ns_1@10.242.238.90">>,<0.25327.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:50:52.305,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25332.0>,{#Ref<0.0.1.22389>,<0.25336.0>}} [error_logger:info,2014-08-19T16:50:52.305,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25336.0>}, {name, {new_child_id, [407,409,410,411,413,414,415,416,418,419,421, 422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [407,409,410,411,413,414,415,416,418,419, 421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:52.310,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.313,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,409,410,411,413,414,415,416,418,419,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:52.313,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25337.0> [ns_server:debug,2014-08-19T16:50:52.315,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.315,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4611 us [ns_server:debug,2014-08-19T16:50:52.316,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.316,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{413, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:52.320,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 408 state to replica [ns_server:info,2014-08-19T16:50:52.320,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [407,408,409,410,411,413,414,415,416,418,419,421,422,423,424,425,426] ([408], []) [views:debug,2014-08-19T16:50:52.321,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/670. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:50:52.321,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",670,active,1} [ns_server:debug,2014-08-19T16:50:52.325,ns_1@10.242.238.90:<0.25338.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,408,409,410,411,413,414,415,416,418,419, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.22552>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,408,409,410,411,413,414,415,416,418,419,421,422,423,424, 425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:52.326,ns_1@10.242.238.90:<0.25338.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25336.0> [ns_server:info,2014-08-19T16:50:52.326,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:52.338,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {413,1}, {414,1}, {415,1}, {416,1}, {418,1}, {419,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:50:52.339,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:52.339,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:52.339,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:52.339,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:52.339,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:52.339,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:52.339,ns_1@10.242.238.90:<0.25341.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:52.339,ns_1@10.242.238.90:<0.25341.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:52.340,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:52.340,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:52.340,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:52.340,ns_1@10.242.238.90:<0.25336.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:52.340,ns_1@10.242.238.90:<0.25338.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25336.0> [ns_server:debug,2014-08-19T16:50:52.340,ns_1@10.242.238.90:<0.25338.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:52.341,ns_1@10.242.238.90:<0.25343.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:52.341,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25336.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25337.0>,<<"cut off">>,<<"cut off">>,[],52,false,false,0, {1408,452652,339317}, completed, {<0.25338.0>,#Ref<0.0.1.22567>}, <<"replication_ns_1@10.242.238.90">>,<0.25336.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:52.341,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25338.0>,{#Ref<0.0.1.22554>,<0.25343.0>}} [error_logger:info,2014-08-19T16:50:52.341,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25343.0>}, {name, {new_child_id, [407,408,409,410,411,413,414,415,416,418,419, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [407,408,409,410,411,413,414,415,416,418, 419,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:52.346,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.349,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,408,409,410,411,413,414,415,416,418,419,421,422,423,424,425, 426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:52.349,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25344.0> [ns_server:debug,2014-08-19T16:50:52.349,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.349,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3713 us [ns_server:debug,2014-08-19T16:50:52.350,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.350,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{408, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, 
{num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.373,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.376,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2819 us [ns_server:debug,2014-08-19T16:50:52.376,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.376,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.377,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{917, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:50:52.387,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/666. Updated state: active (1) [ns_server:debug,2014-08-19T16:50:52.388,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",666,active,1} [ns_server:debug,2014-08-19T16:50:52.399,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.402,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.402,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3345 us [ns_server:debug,2014-08-19T16:50:52.403,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.403,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{680, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:52.405,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 412 state to replica [ns_server:info,2014-08-19T16:50:52.405,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have 
[407,408,409,410,411,412,413,414,415,416,418,419,421,422,423,424,425,426] ([412], []) [ns_server:debug,2014-08-19T16:50:52.406,ns_1@10.242.238.90:<0.25347.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [407,408,409,410,411,412,413,414,415,416,418, 419,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.22764>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[407,408,409,410,411,412,413,414,415,416,418,419,421,422,423, 424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:52.407,ns_1@10.242.238.90:<0.25347.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25343.0> [ns_server:info,2014-08-19T16:50:52.407,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:52.420,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {418,1}, {419,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:50:52.421,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:52.421,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:52.421,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:52.421,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:52.421,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:52.421,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:52.421,ns_1@10.242.238.90:<0.25349.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:52.421,ns_1@10.242.238.90:<0.25349.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:52.422,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:52.422,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:52.422,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:52.422,ns_1@10.242.238.90:<0.25343.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:50:52.422,ns_1@10.242.238.90:<0.25347.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25343.0> [ns_server:debug,2014-08-19T16:50:52.422,ns_1@10.242.238.90:<0.25347.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:52.422,ns_1@10.242.238.90:<0.25351.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:52.422,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25343.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25344.0>,<<"cut off">>,<<"cut off">>,[],55,false,false,0, {1408,452652,421262}, completed, {<0.25347.0>,#Ref<0.0.1.22777>}, <<"replication_ns_1@10.242.238.90">>,<0.25343.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:52.423,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25347.0>,{#Ref<0.0.1.22766>,<0.25351.0>}} [error_logger:info,2014-08-19T16:50:52.423,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25351.0>}, {name, {new_child_id, [407,408,409,410,411,412,413,414,415,416,418, 419,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [407,408,409,410,411,412,413,414,415,416, 418,419,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:52.427,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.433,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[407,408,409,410,411,412,413,414,415,416,418,419,421,422,423,424, 425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:52.433,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25353.0> [ns_server:debug,2014-08-19T16:50:52.434,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6929 us [ns_server:debug,2014-08-19T16:50:52.434,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.435,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.435,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{412, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, 
{num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:52.437,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 406 state to replica [ns_server:info,2014-08-19T16:50:52.437,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [406,407,408,409,410,411,412,413,414,415,416,418,419,421,422,423,424,425,426] ([406], []) [ns_server:debug,2014-08-19T16:50:52.440,ns_1@10.242.238.90:<0.25354.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [406,407,408,409,410,411,412,413,414,415,416, 418,419,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.22904>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[406,407,408,409,410,411,412,413,414,415,416,418,419,421,422, 423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:52.441,ns_1@10.242.238.90:<0.25354.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25351.0> [ns_server:info,2014-08-19T16:50:52.441,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:52.448,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {418,1}, {419,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:50:52.449,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:52.449,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:52.449,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:52.449,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:52.449,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:52.449,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:52.449,ns_1@10.242.238.90:<0.25356.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:52.450,ns_1@10.242.238.90:<0.25356.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:52.450,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:52.450,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:52.450,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:52.450,ns_1@10.242.238.90:<0.25351.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:52.450,ns_1@10.242.238.90:<0.25354.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25351.0> [ns_server:debug,2014-08-19T16:50:52.450,ns_1@10.242.238.90:<0.25354.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:52.451,ns_1@10.242.238.90:<0.25358.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:52.451,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25351.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25353.0>,<<"cut off">>,<<"cut off">>,[],58,false,false,0, {1408,452652,449390}, completed, {<0.25354.0>,#Ref<0.0.1.22919>}, <<"replication_ns_1@10.242.238.90">>,<0.25351.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:52.451,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25354.0>,{#Ref<0.0.1.22906>,<0.25358.0>}} [error_logger:info,2014-08-19T16:50:52.451,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25358.0>}, {name, {new_child_id, [406,407,408,409,410,411,412,413,414,415,416, 418,419,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [406,407,408,409,410,411,412,413,414,415, 416,418,419,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:52.456,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.458,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[406,407,408,409,410,411,412,413,414,415,416,418,419,421,422,423, 424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:52.459,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25359.0> [ns_server:debug,2014-08-19T16:50:52.460,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1951 us [ns_server:debug,2014-08-19T16:50:52.460,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.460,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:50:52.461,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{406, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.501,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.505,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.506,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4252 us [ns_server:debug,2014-08-19T16:50:52.506,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.507,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{679, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.529,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.532,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.533,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3094 us [ns_server:debug,2014-08-19T16:50:52.535,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.535,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{935, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.561,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.564,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully 
synchronized config in 2822 us [ns_server:debug,2014-08-19T16:50:52.564,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.565,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.565,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{927, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.589,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.591,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.592,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2794 us [ns_server:debug,2014-08-19T16:50:52.592,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.592,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{925, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.614,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.622,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6650 us [ns_server:debug,2014-08-19T16:50:52.622,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.623,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.623,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{918, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 
'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.653,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.656,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1086 us [ns_server:debug,2014-08-19T16:50:52.656,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.657,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.658,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{920, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.680,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.681,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1255 us [ns_server:debug,2014-08-19T16:50:52.683,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.683,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.684,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{681, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:52.685,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 420 state to replica [ns_server:info,2014-08-19T16:50:52.685,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [406,407,408,409,410,411,412,413,414,415,416,418,419,420,421,422,423,424,425, 426] ([420], []) [ns_server:debug,2014-08-19T16:50:52.686,ns_1@10.242.238.90:<0.25367.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [406,407,408,409,410,411,412,413,414,415,416, 418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.23242>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, 
{username,"default"}, {password,get_from_config}, {vbuckets,[406,407,408,409,410,411,412,413,414,415,416,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:52.687,ns_1@10.242.238.90:<0.25367.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25358.0> [ns_server:info,2014-08-19T16:50:52.687,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:52.698,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:50:52.699,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:52.699,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:52.699,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:52.699,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:52.700,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:52.700,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:52.700,ns_1@10.242.238.90:<0.25369.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:52.700,ns_1@10.242.238.90:<0.25369.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:52.700,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:50:52.700,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:52.700,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:52.700,ns_1@10.242.238.90:<0.25358.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:52.700,ns_1@10.242.238.90:<0.25367.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25358.0> [ns_server:debug,2014-08-19T16:50:52.701,ns_1@10.242.238.90:<0.25367.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:52.701,ns_1@10.242.238.90:<0.25371.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:52.701,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25358.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25359.0>,<<"cut off">>,<<"cut off">>,[],61,false,false,0, {1408,452652,699698}, completed, {<0.25367.0>,#Ref<0.0.1.23255>}, <<"replication_ns_1@10.242.238.90">>,<0.25358.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:52.701,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25367.0>,{#Ref<0.0.1.23244>,<0.25371.0>}} [error_logger:info,2014-08-19T16:50:52.701,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25371.0>}, {name, {new_child_id, [406,407,408,409,410,411,412,413,414,415,416, 418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [406,407,408,409,410,411,412,413,414,415, 416,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:52.707,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.709,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[406,407,408,409,410,411,412,413,414,415,416,418,419,420,421,422, 423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:50:52.709,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25373.0> [ns_server:debug,2014-08-19T16:50:52.710,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.710,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3034 us [ns_server:debug,2014-08-19T16:50:52.710,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.711,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{420, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.734,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.740,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5743 us 
[ns_server:debug,2014-08-19T16:50:52.740,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.741,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{673, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.744,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.768,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.771,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.771,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2667 us [ns_server:debug,2014-08-19T16:50:52.772,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.772,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{929, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.802,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.808,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.808,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6071 us [ns_server:debug,2014-08-19T16:50:52.809,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.809,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{921, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, 
{map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:52.811,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 417 state to replica [ns_server:info,2014-08-19T16:50:52.811,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424, 425,426] ([417], []) [ns_server:debug,2014-08-19T16:50:52.813,ns_1@10.242.238.90:<0.25378.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.23466>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[406,407,408,409,410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:50:52.813,ns_1@10.242.238.90:<0.25378.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25371.0> [ns_server:info,2014-08-19T16:50:52.814,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:50:52.825,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:50:52.826,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:50:52.826,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:50:52.826,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:50:52.826,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:50:52.827,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:50:52.827,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:50:52.827,ns_1@10.242.238.90:<0.25380.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:50:52.827,ns_1@10.242.238.90:<0.25380.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:50:52.827,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:50:52.827,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:50:52.827,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:50:52.827,ns_1@10.242.238.90:<0.25371.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:50:52.827,ns_1@10.242.238.90:<0.25378.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25371.0> [ns_server:debug,2014-08-19T16:50:52.828,ns_1@10.242.238.90:<0.25378.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:50:52.828,ns_1@10.242.238.90:<0.25382.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:50:52.828,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25371.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25373.0>,<<"cut off">>,<<"cut off">>,[],64,false,false,0, {1408,452652,826616}, completed, {<0.25378.0>,#Ref<0.0.1.23479>}, <<"replication_ns_1@10.242.238.90">>,<0.25371.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:50:52.828,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.25378.0>,{#Ref<0.0.1.23468>,<0.25382.0>}} [error_logger:info,2014-08-19T16:50:52.828,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.25382.0>}, {name, {new_child_id, [406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:50:52.834,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.837,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:debug,2014-08-19T16:50:52.837,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.837,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2877 us [rebalance:debug,2014-08-19T16:50:52.837,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25383.0> [ns_server:debug,2014-08-19T16:50:52.838,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:50:52.838,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{417, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.857,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.861,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3962 us [ns_server:debug,2014-08-19T16:50:52.861,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.862,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.863,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{677, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.886,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.890,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.890,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3759 us [ns_server:debug,2014-08-19T16:50:52.891,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.891,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{923, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.915,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
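Each full synchronization request from 'ns_1@10.242.238.88' is followed by a "Fully synchronized config in N us" entry. A small, hypothetical log-analysis helper that collects those durations to gauge config-sync latency during the rebalance:

import re
import statistics

# Pull the "Fully synchronized config in N us" durations out of a chunk of
# this log. SYNC_RE and sync_latencies_us are our names, not ns_server's.
SYNC_RE = re.compile(r"Fully synchronized config in (\d+) us")

def sync_latencies_us(log_text):
    return [int(m) for m in SYNC_RE.findall(log_text)]

# Example against durations that appear above (2877, 3962, 3759 us):
sample = ("Fully synchronized config in 2877 us ... "
          "Fully synchronized config in 3962 us ... "
          "Fully synchronized config in 3759 us")
lat = sync_latencies_us(sample)
print(lat, round(statistics.mean(lat), 1))   # [2877, 3962, 3759] 3532.7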
[ns_server:debug,2014-08-19T16:50:52.919,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.919,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3731 us [ns_server:debug,2014-08-19T16:50:52.919,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.920,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{916, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.949,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.952,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2838 us [ns_server:debug,2014-08-19T16:50:52.952,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.952,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.953,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{670, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.976,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:52.987,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 10828 us [ns_server:debug,2014-08-19T16:50:52.987,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.988,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.988,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{932, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, 
{flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:53.011,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:53.015,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.015,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3080 us [ns_server:debug,2014-08-19T16:50:53.015,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.016,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{672, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:53.038,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:53.040,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.040,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1457 us [ns_server:debug,2014-08-19T16:50:53.040,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.041,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{666, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:53.069,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:53.074,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.074,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4684 us [ns_server:debug,2014-08-19T16:50:53.075,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:50:53.075,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{931, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:53.102,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:53.106,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4011 us [ns_server:debug,2014-08-19T16:50:53.107,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.108,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.109,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{919, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:53.133,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:50:53.134,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1293 us [ns_server:debug,2014-08-19T16:50:53.135,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.135,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.136,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{678, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:53.208,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 661 state to replica [ns_server:info,2014-08-19T16:50:53.214,ns_1@10.242.238.90:<0.25394.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 661 to state replica 
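Every "config change: buckets ->" entry above carries a one-vbucket map delta of the form {VBucket, OldChain, NewChain} (for example vbucket 919 moving from ['ns_1@10.242.238.88', undefined] to ['ns_1@10.242.238.91', 'ns_1@10.242.238.89']). A hypothetical parser that collects those deltas from this log text:

import re

# Parse the single-vbucket map deltas in the "config change: buckets ->"
# entries above. MAP_RE and chain_changes are illustrative names only.
MAP_RE = re.compile(r"\{map,\[\{(\d+),\s*\[([^\]]*)\],\s*\[([^\]]*)\]", re.S)

def _nodes(chain):
    return [n.strip().strip("'") for n in chain.split(",")]

def chain_changes(log_text):
    changes = {}
    for vb, old_chain, new_chain in MAP_RE.findall(log_text):
        changes[int(vb)] = (_nodes(old_chain), _nodes(new_chain))
    return changes

sample = ("config change: buckets -> [{configs,[{\"default\", [{map,[{677, "
          "['ns_1@10.242.238.88',undefined], "
          "['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, ...")
print(chain_changes(sample))
# {677: (['ns_1@10.242.238.88', 'undefined'],
#        ['ns_1@10.242.238.90', 'ns_1@10.242.238.89'])}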
[ns_server:debug,2014-08-19T16:50:53.253,ns_1@10.242.238.90:<0.25394.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_661_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:53.255,ns_1@10.242.238.90:<0.25394.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[661]}, {checkpoints,[{661,0}]}, {name,<<"replication_building_661_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[661]}, {takeover,false}, {suffix,"building_661_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",661,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:53.256,ns_1@10.242.238.90:<0.25394.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25395.0> [rebalance:debug,2014-08-19T16:50:53.256,ns_1@10.242.238.90:<0.25394.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:53.257,ns_1@10.242.238.90:<0.25394.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14112.1>,#Ref<16550.0.1.193759>}]} [rebalance:info,2014-08-19T16:50:53.257,ns_1@10.242.238.90:<0.25394.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 661 [rebalance:debug,2014-08-19T16:50:53.257,ns_1@10.242.238.90:<0.25394.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14112.1>,#Ref<16550.0.1.193759>}] [ns_server:debug,2014-08-19T16:50:53.258,ns_1@10.242.238.90:<0.25394.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:53.258,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25396.0> (ok) [rebalance:debug,2014-08-19T16:50:53.260,ns_1@10.242.238.90:<0.25397.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 661 [ns_server:info,2014-08-19T16:50:53.264,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 405 state to replica [ns_server:info,2014-08-19T16:50:53.268,ns_1@10.242.238.90:<0.25400.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 405 to state replica [ns_server:debug,2014-08-19T16:50:53.294,ns_1@10.242.238.90:<0.25400.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_405_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:53.295,ns_1@10.242.238.90:<0.25400.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[405]}, {checkpoints,[{405,0}]}, {name,<<"replication_building_405_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[405]}, {takeover,false}, {suffix,"building_405_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",405,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:53.296,ns_1@10.242.238.90:<0.25400.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25401.0> [rebalance:debug,2014-08-19T16:50:53.296,ns_1@10.242.238.90:<0.25400.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:53.297,ns_1@10.242.238.90:<0.25400.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14148.1>,#Ref<16550.0.1.193925>}]} 
[rebalance:info,2014-08-19T16:50:53.297,ns_1@10.242.238.90:<0.25400.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 405 [rebalance:debug,2014-08-19T16:50:53.297,ns_1@10.242.238.90:<0.25400.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14148.1>,#Ref<16550.0.1.193925>}] [ns_server:debug,2014-08-19T16:50:53.298,ns_1@10.242.238.90:<0.25400.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:53.313,ns_1@10.242.238.90:<0.25416.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 405 [ns_server:debug,2014-08-19T16:50:53.364,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 661. Nacking mccouch update. [views:debug,2014-08-19T16:50:53.364,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/661. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:53.364,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",661,replica,0} [ns_server:debug,2014-08-19T16:50:53.365,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,412,957,710,1021,944,761,697,1008,995,748, 684,982,950,767,735,703,671,424,1014,969,754,722,690,411,1001,988,956,741, 709,677,1020,975,943,760,728,696,664,417,1007,994,962,747,715,683,981,949, 766,734,702,670,423,1013,968,753,721,689,410,1000,987,955,740,708,676,1019, 974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765,733,701,669, 422,1012,999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726, 694,662,415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998, 966,751,719,687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414, 1004,991,959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407, 952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,406,951, 704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716] [ns_server:info,2014-08-19T16:50:53.371,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 660 state to replica [ns_server:info,2014-08-19T16:50:53.377,ns_1@10.242.238.90:<0.25433.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 660 to state replica [ns_server:debug,2014-08-19T16:50:53.412,ns_1@10.242.238.90:<0.25433.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_660_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:53.413,ns_1@10.242.238.90:<0.25433.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[660]}, {checkpoints,[{660,0}]}, {name,<<"replication_building_660_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[660]}, {takeover,false}, {suffix,"building_660_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",660,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:53.414,ns_1@10.242.238.90:<0.25433.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25434.0> [rebalance:debug,2014-08-19T16:50:53.415,ns_1@10.242.238.90:<0.25433.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
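capi_set_view_manager periodically dumps the full "Usable vbuckets" list; diffing consecutive snapshots shows which vbuckets became usable as the replica builds land (here 661, then 405, then 660, and so on). A trivial sketch of that diff, using truncated lists for illustration:

# Hypothetical helper: diff consecutive "Usable vbuckets" snapshots from
# capi_set_view_manager to see which vbuckets were added or dropped.
def usable_diff(prev_snapshot, next_snapshot):
    prev, nxt = set(prev_snapshot), set(next_snapshot)
    return sorted(nxt - prev), sorted(prev - nxt)

# The snapshot that follows the one above adds vbucket 405:
first = [997, 750, 686, 984]        # truncated for illustration
second = first + [405]
print(usable_diff(first, second))   # ([405], [])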
[rebalance:debug,2014-08-19T16:50:53.415,ns_1@10.242.238.90:<0.25433.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14204.1>,#Ref<16550.0.1.194225>}]} [rebalance:info,2014-08-19T16:50:53.415,ns_1@10.242.238.90:<0.25433.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 660 [rebalance:debug,2014-08-19T16:50:53.416,ns_1@10.242.238.90:<0.25433.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14204.1>,#Ref<16550.0.1.194225>}] [ns_server:debug,2014-08-19T16:50:53.416,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25435.0> (ok) [ns_server:debug,2014-08-19T16:50:53.416,ns_1@10.242.238.90:<0.25433.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:53.418,ns_1@10.242.238.90:<0.25436.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 660 [ns_server:info,2014-08-19T16:50:53.422,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 404 state to replica [ns_server:info,2014-08-19T16:50:53.426,ns_1@10.242.238.90:<0.25439.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 404 to state replica [views:debug,2014-08-19T16:50:53.432,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/661. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:53.432,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",661,replica,0} [ns_server:debug,2014-08-19T16:50:53.451,ns_1@10.242.238.90:<0.25439.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_404_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:53.452,ns_1@10.242.238.90:<0.25439.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[404]}, {checkpoints,[{404,0}]}, {name,<<"replication_building_404_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[404]}, {takeover,false}, {suffix,"building_404_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",404,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:53.452,ns_1@10.242.238.90:<0.25439.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25442.0> [rebalance:debug,2014-08-19T16:50:53.453,ns_1@10.242.238.90:<0.25439.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:53.453,ns_1@10.242.238.90:<0.25439.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14226.1>,#Ref<16550.0.1.194356>}]} [rebalance:info,2014-08-19T16:50:53.453,ns_1@10.242.238.90:<0.25439.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 404 [rebalance:debug,2014-08-19T16:50:53.453,ns_1@10.242.238.90:<0.25439.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14226.1>,#Ref<16550.0.1.194356>}] [ns_server:debug,2014-08-19T16:50:53.454,ns_1@10.242.238.90:<0.25439.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:53.470,ns_1@10.242.238.90:<0.25443.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 404 
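Each replica build above follows the same sequence: "Changed vbucket N state to replica", "Setting ... vbucket N to state replica", "killing tap named: replication_building_N_...", "Starting tap stream", and finally "Going to wait for persistence of checkpoint 1 in vbucket N". A hypothetical timing helper that pairs the first and last of those entries per vbucket to measure how long each build setup took (the example lines are condensed from the log above):

import re
from datetime import datetime

# Pair "Changed vbucket N state to replica" with the matching
# "persistence of checkpoint 1 in vbucket N" entry and report the gap.
TS = r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3})"
CHANGED = re.compile(TS + r".*?Changed vbucket (\d+) state to replica")
WAIT = re.compile(TS + r".*?persistence of checkpoint 1 in vbucket (\d+)")

def parse_ts(ts):
    return datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S.%f")

def setup_gaps(log_lines):
    started, gaps = {}, {}
    for line in log_lines:
        if (m := CHANGED.search(line)):
            started[int(m.group(2))] = parse_ts(m.group(1))
        elif (m := WAIT.search(line)) and int(m.group(2)) in started:
            vb = int(m.group(2))
            gaps[vb] = (parse_ts(m.group(1)) - started[vb]).total_seconds()
    return gaps

lines = [
    "[ns_server:info,2014-08-19T16:50:53.264,...]Changed vbucket 405 state to replica",
    "[rebalance:debug,2014-08-19T16:50:53.313,...]Going to wait for persistence of checkpoint 1 in vbucket 405",
]
print(setup_gaps(lines))   # {405: 0.049}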
[views:debug,2014-08-19T16:50:53.498,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/661. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:53.499,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",661,pending,0} [ns_server:info,2014-08-19T16:50:53.528,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 659 state to replica [ns_server:info,2014-08-19T16:50:53.534,ns_1@10.242.238.90:<0.25446.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 659 to state replica [ns_server:debug,2014-08-19T16:50:53.568,ns_1@10.242.238.90:<0.25446.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_659_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:53.569,ns_1@10.242.238.90:<0.25446.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[659]}, {checkpoints,[{659,0}]}, {name,<<"replication_building_659_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[659]}, {takeover,false}, {suffix,"building_659_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",659,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:53.570,ns_1@10.242.238.90:<0.25446.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25461.0> [rebalance:debug,2014-08-19T16:50:53.570,ns_1@10.242.238.90:<0.25446.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:53.571,ns_1@10.242.238.90:<0.25446.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14281.1>,#Ref<16550.0.1.194629>}]} [rebalance:info,2014-08-19T16:50:53.571,ns_1@10.242.238.90:<0.25446.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 659 [rebalance:debug,2014-08-19T16:50:53.571,ns_1@10.242.238.90:<0.25446.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14281.1>,#Ref<16550.0.1.194629>}] [ns_server:debug,2014-08-19T16:50:53.572,ns_1@10.242.238.90:<0.25446.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:53.572,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25462.0> (ok) [rebalance:debug,2014-08-19T16:50:53.574,ns_1@10.242.238.90:<0.25463.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 659 [ns_server:info,2014-08-19T16:50:53.579,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 403 state to replica [ns_server:info,2014-08-19T16:50:53.584,ns_1@10.242.238.90:<0.25466.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 403 to state replica [ns_server:debug,2014-08-19T16:50:53.608,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 405. Nacking mccouch update. [views:debug,2014-08-19T16:50:53.608,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/405. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:53.608,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",405,replica,0} [ns_server:debug,2014-08-19T16:50:53.609,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,413,958,711,1022,945,762,698,1009,996, 749,685,983,736,672,425,970,723,412,957,710,1021,944,761,697,1008,995,748, 684,982,950,767,735,703,671,424,1014,969,754,722,690,411,1001,988,956,741, 709,677,1020,975,943,760,728,696,664,417,1007,994,962,747,715,683,981,949, 766,734,702,670,423,1013,968,753,721,689,410,1000,987,955,740,708,676,1019, 974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765,733,701,669, 422,1012,999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726, 694,662,415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998, 966,751,719,687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414, 1004,991,959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407, 952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,406,951, 704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,405] [ns_server:debug,2014-08-19T16:50:53.611,ns_1@10.242.238.90:<0.25466.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_403_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:53.613,ns_1@10.242.238.90:<0.25466.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[403]}, {checkpoints,[{403,0}]}, {name,<<"replication_building_403_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[403]}, {takeover,false}, {suffix,"building_403_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",403,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:53.614,ns_1@10.242.238.90:<0.25466.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25467.0> [rebalance:debug,2014-08-19T16:50:53.614,ns_1@10.242.238.90:<0.25466.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:53.614,ns_1@10.242.238.90:<0.25466.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14303.1>,#Ref<16550.0.1.194752>}]} [rebalance:info,2014-08-19T16:50:53.615,ns_1@10.242.238.90:<0.25466.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 403 [rebalance:debug,2014-08-19T16:50:53.615,ns_1@10.242.238.90:<0.25466.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14303.1>,#Ref<16550.0.1.194752>}] [ns_server:debug,2014-08-19T16:50:53.615,ns_1@10.242.238.90:<0.25466.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:53.627,ns_1@10.242.238.90:<0.25468.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 403 [views:debug,2014-08-19T16:50:53.642,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/405. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:53.642,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",405,replica,0} [ns_server:info,2014-08-19T16:50:53.682,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 658 state to replica [ns_server:info,2014-08-19T16:50:53.689,ns_1@10.242.238.90:<0.25485.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 658 to state replica [ns_server:debug,2014-08-19T16:50:53.709,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 660. Nacking mccouch update. [views:debug,2014-08-19T16:50:53.709,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/660. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:53.709,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",660,pending,0} [ns_server:debug,2014-08-19T16:50:53.709,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,1022,945,762,698,1009, 996,749,685,983,736,672,425,970,723,412,957,710,1021,944,761,697,1008,995, 748,684,982,735,671,424,969,754,722,690,411,1001,988,956,741,709,677,1020, 975,943,760,728,696,664,417,1007,994,962,747,715,683,981,949,766,734,702,670, 423,1013,968,753,721,689,410,1000,987,955,740,708,676,1019,974,942,759,727, 695,663,416,1006,993,961,746,714,682,980,948,765,733,701,669,422,1012,999, 967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726,694,662,415, 1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998,966,751,719, 687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414,1004,991,959, 744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407,952,705,1016, 939,756,692,1003,990,743,679,977,730,666,419,964,717,406,951,704,1015,938, 755,691,1002,989,742,678,976,729,665,418,963,716,405,950,767,703,1014] [ns_server:debug,2014-08-19T16:50:53.723,ns_1@10.242.238.90:<0.25485.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_658_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:53.724,ns_1@10.242.238.90:<0.25485.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[658]}, {checkpoints,[{658,0}]}, {name,<<"replication_building_658_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[658]}, {takeover,false}, {suffix,"building_658_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",658,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:53.725,ns_1@10.242.238.90:<0.25485.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25486.0> [rebalance:debug,2014-08-19T16:50:53.725,ns_1@10.242.238.90:<0.25485.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:53.726,ns_1@10.242.238.90:<0.25485.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14358.1>,#Ref<16550.0.1.195015>}]} [rebalance:info,2014-08-19T16:50:53.726,ns_1@10.242.238.90:<0.25485.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 658 
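The mc_couch set_vbucket events above report per-vbucket state transitions, and the same vbucket can appear more than once (661 is first reported as replica, then as pending once its building stream was started with set_to_pending_state). A hypothetical tally of the most recent state seen per vbucket:

import re

# Record the latest state reported for each vbucket in the
# "Got set_vbucket event for default/N. Updated state: S (R)" entries.
EVENT = re.compile(
    r"Got set_vbucket event for default/(\d+)\.\s*Updated state: (\w+)")

def latest_states(log_text):
    states = {}
    for vb, state in EVENT.findall(log_text):
        states[int(vb)] = state
    return states

sample = ("Got set_vbucket event for default/661. Updated state: replica (0) "
          "... Got set_vbucket event for default/661. Updated state: pending (0)")
print(latest_states(sample))   # {661: 'pending'}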
[rebalance:debug,2014-08-19T16:50:53.726,ns_1@10.242.238.90:<0.25485.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14358.1>,#Ref<16550.0.1.195015>}] [ns_server:debug,2014-08-19T16:50:53.727,ns_1@10.242.238.90:<0.25485.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:53.727,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25487.0> (ok) [rebalance:debug,2014-08-19T16:50:53.731,ns_1@10.242.238.90:<0.25488.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 658 [ns_server:info,2014-08-19T16:50:53.735,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 402 state to replica [ns_server:info,2014-08-19T16:50:53.738,ns_1@10.242.238.90:<0.25491.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 402 to state replica [views:debug,2014-08-19T16:50:53.743,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/660. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:53.743,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",660,pending,0} [ns_server:debug,2014-08-19T16:50:53.763,ns_1@10.242.238.90:<0.25491.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_402_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:53.765,ns_1@10.242.238.90:<0.25491.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[402]}, {checkpoints,[{402,0}]}, {name,<<"replication_building_402_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[402]}, {takeover,false}, {suffix,"building_402_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",402,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:53.765,ns_1@10.242.238.90:<0.25491.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25492.0> [rebalance:debug,2014-08-19T16:50:53.765,ns_1@10.242.238.90:<0.25491.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:53.766,ns_1@10.242.238.90:<0.25491.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14380.1>,#Ref<16550.0.1.195133>}]} [rebalance:info,2014-08-19T16:50:53.766,ns_1@10.242.238.90:<0.25491.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 402 [rebalance:debug,2014-08-19T16:50:53.766,ns_1@10.242.238.90:<0.25491.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14380.1>,#Ref<16550.0.1.195133>}] [ns_server:debug,2014-08-19T16:50:53.767,ns_1@10.242.238.90:<0.25491.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:53.781,ns_1@10.242.238.90:<0.25507.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 402 [ns_server:debug,2014-08-19T16:50:53.835,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 404. Nacking mccouch update. [views:debug,2014-08-19T16:50:53.835,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/404. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:53.835,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",404,replica,0} [ns_server:debug,2014-08-19T16:50:53.835,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,1022,945,762,698,1009, 996,749,685,983,736,672,425,970,723,412,957,710,1021,944,761,697,1008,995, 748,684,982,735,671,424,969,754,722,690,411,1001,988,956,741,709,677,1020, 975,943,760,728,696,664,417,1007,994,962,747,715,683,404,981,949,766,734,702, 670,423,1013,968,753,721,689,410,1000,987,955,740,708,676,1019,974,942,759, 727,695,663,416,1006,993,961,746,714,682,980,948,765,733,701,669,422,1012, 999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726,694,662,415, 1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998,966,751,719, 687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414,1004,991,959, 744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407,952,705,1016, 939,756,692,1003,990,743,679,977,730,666,419,964,717,406,951,704,1015,938, 755,691,1002,989,742,678,976,729,665,418,963,716,405,950,767,703,1014] [ns_server:info,2014-08-19T16:50:53.839,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 657 state to replica [ns_server:info,2014-08-19T16:50:53.845,ns_1@10.242.238.90:<0.25510.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 657 to state replica [ns_server:debug,2014-08-19T16:50:53.882,ns_1@10.242.238.90:<0.25510.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_657_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:53.884,ns_1@10.242.238.90:<0.25510.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[657]}, {checkpoints,[{657,0}]}, {name,<<"replication_building_657_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[657]}, {takeover,false}, {suffix,"building_657_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",657,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:53.884,ns_1@10.242.238.90:<0.25510.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25511.0> [rebalance:debug,2014-08-19T16:50:53.885,ns_1@10.242.238.90:<0.25510.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:53.885,ns_1@10.242.238.90:<0.25510.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14435.1>,#Ref<16550.0.1.195394>}]} [rebalance:info,2014-08-19T16:50:53.885,ns_1@10.242.238.90:<0.25510.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 657 [rebalance:debug,2014-08-19T16:50:53.886,ns_1@10.242.238.90:<0.25510.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14435.1>,#Ref<16550.0.1.195394>}] [ns_server:debug,2014-08-19T16:50:53.886,ns_1@10.242.238.90:<0.25510.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:53.886,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25512.0> (ok) [rebalance:debug,2014-08-19T16:50:53.888,ns_1@10.242.238.90:<0.25513.0>:janitor_agent:handle_call:793]Going to wait for 
persistence of checkpoint 1 in vbucket 657 [ns_server:info,2014-08-19T16:50:53.893,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 401 state to replica [views:debug,2014-08-19T16:50:53.893,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/404. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:53.893,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",404,replica,0} [rebalance:debug,2014-08-19T16:50:53.894,ns_1@10.242.238.90:<0.25507.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:53.894,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25507.0> (ok) [rebalance:debug,2014-08-19T16:50:53.895,ns_1@10.242.238.90:<0.25468.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:53.895,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25468.0> (ok) [ns_server:info,2014-08-19T16:50:53.896,ns_1@10.242.238.90:<0.25516.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 401 to state replica [ns_server:debug,2014-08-19T16:50:53.921,ns_1@10.242.238.90:<0.25516.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_401_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:53.922,ns_1@10.242.238.90:<0.25516.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[401]}, {checkpoints,[{401,0}]}, {name,<<"replication_building_401_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[401]}, {takeover,false}, {suffix,"building_401_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",401,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:53.923,ns_1@10.242.238.90:<0.25516.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25517.0> [rebalance:debug,2014-08-19T16:50:53.923,ns_1@10.242.238.90:<0.25516.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:53.923,ns_1@10.242.238.90:<0.25516.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14457.1>,#Ref<16550.0.1.195490>}]} [rebalance:info,2014-08-19T16:50:53.924,ns_1@10.242.238.90:<0.25516.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 401 [rebalance:debug,2014-08-19T16:50:53.924,ns_1@10.242.238.90:<0.25516.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14457.1>,#Ref<16550.0.1.195490>}] [ns_server:debug,2014-08-19T16:50:53.924,ns_1@10.242.238.90:<0.25516.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:53.939,ns_1@10.242.238.90:<0.25518.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 401 [ns_server:debug,2014-08-19T16:50:53.977,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 659. Nacking mccouch update. [views:debug,2014-08-19T16:50:53.977,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/659. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:53.977,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",659,pending,0} [ns_server:debug,2014-08-19T16:50:53.978,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,1022,945,762,698,1009, 996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697,1008, 995,748,684,982,735,671,424,969,754,722,690,411,1001,988,956,741,709,677, 1020,975,943,760,728,696,664,417,1007,994,962,747,715,683,404,981,949,766, 734,702,670,423,1013,968,753,721,689,410,1000,987,955,740,708,676,1019,974, 942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765,733,701,669,422, 1012,999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726,694, 662,415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998,966, 751,719,687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414,1004, 991,959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407,952, 705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,406,951,704, 1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,405,950,767,703, 1014] [ns_server:info,2014-08-19T16:50:53.996,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 656 state to replica [ns_server:info,2014-08-19T16:50:54.003,ns_1@10.242.238.90:<0.25535.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 656 to state replica [views:debug,2014-08-19T16:50:54.011,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/659. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:54.011,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",659,pending,0} [ns_server:debug,2014-08-19T16:50:54.037,ns_1@10.242.238.90:<0.25535.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_656_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:54.039,ns_1@10.242.238.90:<0.25535.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[656]}, {checkpoints,[{656,0}]}, {name,<<"replication_building_656_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[656]}, {takeover,false}, {suffix,"building_656_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",656,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:54.040,ns_1@10.242.238.90:<0.25535.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25536.0> [rebalance:debug,2014-08-19T16:50:54.040,ns_1@10.242.238.90:<0.25535.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:54.040,ns_1@10.242.238.90:<0.25535.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14512.1>,#Ref<16550.0.1.195749>}]} [rebalance:info,2014-08-19T16:50:54.041,ns_1@10.242.238.90:<0.25535.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 656 [rebalance:debug,2014-08-19T16:50:54.041,ns_1@10.242.238.90:<0.25535.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14512.1>,#Ref<16550.0.1.195749>}] 
[ns_server:debug,2014-08-19T16:50:54.042,ns_1@10.242.238.90:<0.25535.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:54.042,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25537.0> (ok) [rebalance:debug,2014-08-19T16:50:54.043,ns_1@10.242.238.90:<0.25538.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 656 [ns_server:info,2014-08-19T16:50:54.048,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 400 state to replica [ns_server:info,2014-08-19T16:50:54.052,ns_1@10.242.238.90:<0.25555.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 400 to state replica [ns_server:debug,2014-08-19T16:50:54.076,ns_1@10.242.238.90:<0.25555.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_400_'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:50:54.078,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 657. Nacking mccouch update. [views:debug,2014-08-19T16:50:54.078,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/657. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:54.078,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",657,pending,0} [rebalance:info,2014-08-19T16:50:54.078,ns_1@10.242.238.90:<0.25555.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[400]}, {checkpoints,[{400,0}]}, {name,<<"replication_building_400_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[400]}, {takeover,false}, {suffix,"building_400_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",400,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [ns_server:debug,2014-08-19T16:50:54.078,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,1022,945,762,698,1009, 996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697,1008, 995,748,684,982,735,671,424,969,754,722,690,411,1001,988,956,741,709,677, 1020,975,943,760,728,696,664,417,1007,994,962,747,715,683,404,981,949,766, 734,702,670,423,1013,968,753,721,689,657,410,1000,987,955,740,708,676,1019, 974,942,759,727,695,663,416,1006,993,961,746,714,682,980,948,765,733,701,669, 422,1012,999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726, 694,662,415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998, 966,751,719,687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414, 1004,991,959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407, 952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,406,951, 704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,405,950,767, 703,1014] [rebalance:debug,2014-08-19T16:50:54.079,ns_1@10.242.238.90:<0.25555.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25556.0> [rebalance:debug,2014-08-19T16:50:54.079,ns_1@10.242.238.90:<0.25555.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:54.080,ns_1@10.242.238.90:<0.25555.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter 
{had_backfill,undefined,undefined,[{<16550.14534.1>,#Ref<16550.0.1.195848>}]} [rebalance:info,2014-08-19T16:50:54.080,ns_1@10.242.238.90:<0.25555.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 400 [rebalance:debug,2014-08-19T16:50:54.080,ns_1@10.242.238.90:<0.25555.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14534.1>,#Ref<16550.0.1.195848>}] [ns_server:debug,2014-08-19T16:50:54.081,ns_1@10.242.238.90:<0.25555.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:54.096,ns_1@10.242.238.90:<0.25557.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 400 [views:debug,2014-08-19T16:50:54.132,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/657. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:54.133,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",657,pending,0} [ns_server:info,2014-08-19T16:50:54.153,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 655 state to replica [ns_server:info,2014-08-19T16:50:54.161,ns_1@10.242.238.90:<0.25560.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 655 to state replica [ns_server:debug,2014-08-19T16:50:54.200,ns_1@10.242.238.90:<0.25560.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_655_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:54.202,ns_1@10.242.238.90:<0.25560.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[655]}, {checkpoints,[{655,0}]}, {name,<<"replication_building_655_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[655]}, {takeover,false}, {suffix,"building_655_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",655,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:54.202,ns_1@10.242.238.90:<0.25560.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25575.0> [rebalance:debug,2014-08-19T16:50:54.202,ns_1@10.242.238.90:<0.25560.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:54.203,ns_1@10.242.238.90:<0.25560.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14575.1>,#Ref<16550.0.1.196071>}]} [rebalance:info,2014-08-19T16:50:54.203,ns_1@10.242.238.90:<0.25560.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 655 [rebalance:debug,2014-08-19T16:50:54.204,ns_1@10.242.238.90:<0.25560.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14575.1>,#Ref<16550.0.1.196071>}] [ns_server:debug,2014-08-19T16:50:54.204,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25576.0> (ok) [ns_server:debug,2014-08-19T16:50:54.204,ns_1@10.242.238.90:<0.25560.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:54.206,ns_1@10.242.238.90:<0.25577.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 655 [ns_server:info,2014-08-19T16:50:54.211,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 399 state to 
replica [ns_server:info,2014-08-19T16:50:54.214,ns_1@10.242.238.90:<0.25580.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 399 to state replica [ns_server:debug,2014-08-19T16:50:54.224,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 403. Nacking mccouch update. [views:debug,2014-08-19T16:50:54.224,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/403. Updated state: replica (1) [ns_server:debug,2014-08-19T16:50:54.224,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",403,replica,1} [ns_server:debug,2014-08-19T16:50:54.225,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,1022,945,762,698,1009, 996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697,1008, 995,748,684,982,735,671,424,969,754,722,690,411,1001,988,956,741,709,677, 1020,975,943,760,728,696,664,417,1007,994,962,747,715,683,404,981,949,766, 734,702,670,423,1013,968,753,721,689,657,410,1000,987,955,740,708,676,1019, 974,942,759,727,695,663,416,1006,993,961,746,714,682,403,980,948,765,733,701, 669,422,1012,999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758, 726,694,662,415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011, 998,966,751,719,687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414, 1004,991,959,744,712,680,1023,978,946,763,731,699,667,420,1010,965,718,407, 952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,406,951, 704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,405,950,767, 703,1014] [ns_server:debug,2014-08-19T16:50:54.240,ns_1@10.242.238.90:<0.25580.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_399_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:54.241,ns_1@10.242.238.90:<0.25580.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[399]}, {checkpoints,[{399,0}]}, {name,<<"replication_building_399_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[399]}, {takeover,false}, {suffix,"building_399_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",399,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:54.242,ns_1@10.242.238.90:<0.25580.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25581.0> [rebalance:debug,2014-08-19T16:50:54.242,ns_1@10.242.238.90:<0.25580.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:54.243,ns_1@10.242.238.90:<0.25580.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14597.1>,#Ref<16550.0.1.196189>}]} [rebalance:info,2014-08-19T16:50:54.243,ns_1@10.242.238.90:<0.25580.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 399 [rebalance:debug,2014-08-19T16:50:54.243,ns_1@10.242.238.90:<0.25580.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14597.1>,#Ref<16550.0.1.196189>}] [ns_server:debug,2014-08-19T16:50:54.244,ns_1@10.242.238.90:<0.25580.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[rebalance:debug,2014-08-19T16:50:54.256,ns_1@10.242.238.90:<0.25582.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 399 [views:debug,2014-08-19T16:50:54.258,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/403. Updated state: replica (1) [ns_server:debug,2014-08-19T16:50:54.258,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",403,replica,1} [ns_server:info,2014-08-19T16:50:54.313,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 654 state to replica [ns_server:info,2014-08-19T16:50:54.320,ns_1@10.242.238.90:<0.25599.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 654 to state replica [ns_server:debug,2014-08-19T16:50:54.350,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 401. Nacking mccouch update. [views:debug,2014-08-19T16:50:54.350,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/401. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:54.350,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",401,replica,0} [ns_server:debug,2014-08-19T16:50:54.351,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,1022,945,762,698,1009, 996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697,1008, 995,748,684,982,735,671,424,969,722,411,988,956,741,709,677,1020,975,943,760, 728,696,664,417,1007,994,962,747,715,683,404,981,949,766,734,702,670,423, 1013,968,753,721,689,657,410,1000,987,955,740,708,676,1019,974,942,759,727, 695,663,416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012, 999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726,694,662,415, 1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998,966,751,719, 687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414,1004,991,959, 744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718,407,952,705, 1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,406,951,704,1015, 938,755,691,1002,989,742,678,976,729,665,418,963,716,405,950,767,703,1014, 754,690,1001] [ns_server:debug,2014-08-19T16:50:54.354,ns_1@10.242.238.90:<0.25599.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_654_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:54.356,ns_1@10.242.238.90:<0.25599.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[654]}, {checkpoints,[{654,0}]}, {name,<<"replication_building_654_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[654]}, {takeover,false}, {suffix,"building_654_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",654,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:54.357,ns_1@10.242.238.90:<0.25599.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25600.0> [rebalance:debug,2014-08-19T16:50:54.357,ns_1@10.242.238.90:<0.25599.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:50:54.357,ns_1@10.242.238.90:<0.25599.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14652.1>,#Ref<16550.0.1.196429>}]} [rebalance:info,2014-08-19T16:50:54.358,ns_1@10.242.238.90:<0.25599.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 654 [rebalance:debug,2014-08-19T16:50:54.358,ns_1@10.242.238.90:<0.25599.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14652.1>,#Ref<16550.0.1.196429>}] [ns_server:debug,2014-08-19T16:50:54.359,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25601.0> (ok) [ns_server:debug,2014-08-19T16:50:54.359,ns_1@10.242.238.90:<0.25599.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:54.360,ns_1@10.242.238.90:<0.25602.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 654 [ns_server:info,2014-08-19T16:50:54.365,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 398 state to replica [ns_server:info,2014-08-19T16:50:54.369,ns_1@10.242.238.90:<0.25605.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 398 to state replica [views:debug,2014-08-19T16:50:54.392,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/401. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:54.392,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",401,replica,0} [ns_server:debug,2014-08-19T16:50:54.395,ns_1@10.242.238.90:<0.25605.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_398_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:54.396,ns_1@10.242.238.90:<0.25605.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[398]}, {checkpoints,[{398,0}]}, {name,<<"replication_building_398_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[398]}, {takeover,false}, {suffix,"building_398_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",398,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:54.397,ns_1@10.242.238.90:<0.25605.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25606.0> [rebalance:debug,2014-08-19T16:50:54.397,ns_1@10.242.238.90:<0.25605.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:54.398,ns_1@10.242.238.90:<0.25605.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14674.1>,#Ref<16550.0.1.196561>}]} [rebalance:info,2014-08-19T16:50:54.398,ns_1@10.242.238.90:<0.25605.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 398 [rebalance:debug,2014-08-19T16:50:54.398,ns_1@10.242.238.90:<0.25605.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14674.1>,#Ref<16550.0.1.196561>}] [ns_server:debug,2014-08-19T16:50:54.399,ns_1@10.242.238.90:<0.25605.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:54.412,ns_1@10.242.238.90:<0.25607.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 398 
[ns_server:info,2014-08-19T16:50:54.466,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 653 state to replica [ns_server:info,2014-08-19T16:50:54.474,ns_1@10.242.238.90:<0.25625.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 653 to state replica [ns_server:debug,2014-08-19T16:50:54.502,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 658. Nacking mccouch update. [views:debug,2014-08-19T16:50:54.503,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/658. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:54.503,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",658,pending,0} [ns_server:debug,2014-08-19T16:50:54.503,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,1022,945,762,698,1009, 996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697,1008, 995,748,684,982,735,671,424,969,722,658,411,988,956,741,709,677,1020,975,943, 760,728,696,664,417,1007,994,962,747,715,683,404,981,949,766,734,702,670,423, 1013,968,753,721,689,657,410,1000,987,955,740,708,676,1019,974,942,759,727, 695,663,416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012, 999,967,752,720,688,409,986,954,739,707,675,1018,973,941,758,726,694,662,415, 1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998,966,751,719, 687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414,1004,991,959, 744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718,407,952,705, 1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,406,951,704,1015, 938,755,691,1002,989,742,678,976,729,665,418,963,716,405,950,767,703,1014, 754,690,1001] [ns_server:debug,2014-08-19T16:50:54.514,ns_1@10.242.238.90:<0.25625.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_653_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:54.515,ns_1@10.242.238.90:<0.25625.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[653]}, {checkpoints,[{653,0}]}, {name,<<"replication_building_653_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[653]}, {takeover,false}, {suffix,"building_653_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",653,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:54.516,ns_1@10.242.238.90:<0.25625.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25626.0> [rebalance:debug,2014-08-19T16:50:54.516,ns_1@10.242.238.90:<0.25625.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:54.516,ns_1@10.242.238.90:<0.25625.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14715.1>,#Ref<16550.0.1.196773>}]} [rebalance:info,2014-08-19T16:50:54.516,ns_1@10.242.238.90:<0.25625.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 653 [rebalance:debug,2014-08-19T16:50:54.517,ns_1@10.242.238.90:<0.25625.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14715.1>,#Ref<16550.0.1.196773>}] 
[ns_server:debug,2014-08-19T16:50:54.517,ns_1@10.242.238.90:<0.25625.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:54.517,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25627.0> (ok) [rebalance:debug,2014-08-19T16:50:54.519,ns_1@10.242.238.90:<0.25628.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 653 [ns_server:info,2014-08-19T16:50:54.524,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 397 state to replica [ns_server:info,2014-08-19T16:50:54.528,ns_1@10.242.238.90:<0.25631.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 397 to state replica [views:debug,2014-08-19T16:50:54.535,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/658. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:54.535,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",658,pending,0} [ns_server:debug,2014-08-19T16:50:54.553,ns_1@10.242.238.90:<0.25631.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_397_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:54.555,ns_1@10.242.238.90:<0.25631.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[397]}, {checkpoints,[{397,0}]}, {name,<<"replication_building_397_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[397]}, {takeover,false}, {suffix,"building_397_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",397,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:54.556,ns_1@10.242.238.90:<0.25631.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25632.0> [rebalance:debug,2014-08-19T16:50:54.556,ns_1@10.242.238.90:<0.25631.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:54.556,ns_1@10.242.238.90:<0.25631.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14751.1>,#Ref<16550.0.1.196918>}]} [rebalance:info,2014-08-19T16:50:54.557,ns_1@10.242.238.90:<0.25631.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 397 [rebalance:debug,2014-08-19T16:50:54.557,ns_1@10.242.238.90:<0.25631.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14751.1>,#Ref<16550.0.1.196918>}] [ns_server:debug,2014-08-19T16:50:54.558,ns_1@10.242.238.90:<0.25631.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:54.572,ns_1@10.242.238.90:<0.25633.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 397 [ns_server:debug,2014-08-19T16:50:54.627,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 656. Nacking mccouch update. [views:debug,2014-08-19T16:50:54.627,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/656. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:54.627,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",656,pending,0} [ns_server:debug,2014-08-19T16:50:54.627,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,1022,945,762,698,1009, 996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697,1008, 995,748,684,982,735,671,424,969,722,658,411,988,956,741,709,677,1020,975,943, 760,728,696,664,417,1007,994,962,747,715,683,404,981,949,766,734,702,670,423, 1013,968,753,721,689,657,410,1000,987,955,740,708,676,1019,974,942,759,727, 695,663,416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012, 999,967,752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662, 415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998,966,751, 719,687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414,1004,991, 959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718,407,952, 705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,406,951,704, 1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,405,950,767,703, 1014,754,690,1001] [ns_server:info,2014-08-19T16:50:54.629,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 652 state to replica [ns_server:info,2014-08-19T16:50:54.635,ns_1@10.242.238.90:<0.25650.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 652 to state replica [views:debug,2014-08-19T16:50:54.661,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/656. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:54.661,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",656,pending,0} [ns_server:debug,2014-08-19T16:50:54.669,ns_1@10.242.238.90:<0.25650.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_652_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:54.671,ns_1@10.242.238.90:<0.25650.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[652]}, {checkpoints,[{652,0}]}, {name,<<"replication_building_652_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[652]}, {takeover,false}, {suffix,"building_652_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",652,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:54.671,ns_1@10.242.238.90:<0.25650.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25651.0> [rebalance:debug,2014-08-19T16:50:54.671,ns_1@10.242.238.90:<0.25650.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:54.672,ns_1@10.242.238.90:<0.25650.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14792.1>,#Ref<16550.0.1.197131>}]} [rebalance:info,2014-08-19T16:50:54.672,ns_1@10.242.238.90:<0.25650.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 652 [rebalance:debug,2014-08-19T16:50:54.672,ns_1@10.242.238.90:<0.25650.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14792.1>,#Ref<16550.0.1.197131>}] 
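capi_set_view_manager logs a full "Usable vbuckets" snapshot every time the set changes; several appear above, each one vbucket larger than the last. Rather than eyeballing thousand-element lists, consecutive snapshots can be diffed. A hedged sketch, assuming the same flat [n,n,...] formatting seen in these entries:

import re

USABLE_RE = re.compile(r"Usable vbuckets:\s*\[([0-9,\s]+)\]")

def usable_vbucket_diffs(text):
    # Parse every snapshot into a set of integers, then report what each
    # snapshot added or dropped relative to the previous one.
    snapshots = [
        {int(n) for n in m.group(1).split(",") if n.strip()}
        for m in USABLE_RE.finditer(text)
    ]
    return [(cur - prev, prev - cur)
            for prev, cur in zip(snapshots, snapshots[1:])]

Across the snapshots above, each diff should come out as a single newly usable vbucket (401, then 658, then 656), matching the set_vbucket event logged just before each snapshot.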
[ns_server:debug,2014-08-19T16:50:54.673,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25652.0> (ok) [ns_server:debug,2014-08-19T16:50:54.673,ns_1@10.242.238.90:<0.25650.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:54.674,ns_1@10.242.238.90:<0.25653.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 652 [ns_server:info,2014-08-19T16:50:54.679,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 396 state to replica [ns_server:info,2014-08-19T16:50:54.683,ns_1@10.242.238.90:<0.25656.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 396 to state replica [ns_server:debug,2014-08-19T16:50:54.707,ns_1@10.242.238.90:<0.25656.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_396_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:54.709,ns_1@10.242.238.90:<0.25656.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[396]}, {checkpoints,[{396,0}]}, {name,<<"replication_building_396_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[396]}, {takeover,false}, {suffix,"building_396_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",396,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:54.709,ns_1@10.242.238.90:<0.25656.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25657.0> [rebalance:debug,2014-08-19T16:50:54.709,ns_1@10.242.238.90:<0.25656.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:54.710,ns_1@10.242.238.90:<0.25656.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14814.1>,#Ref<16550.0.1.197250>}]} [rebalance:info,2014-08-19T16:50:54.710,ns_1@10.242.238.90:<0.25656.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 396 [rebalance:debug,2014-08-19T16:50:54.710,ns_1@10.242.238.90:<0.25656.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14814.1>,#Ref<16550.0.1.197250>}] [ns_server:debug,2014-08-19T16:50:54.711,ns_1@10.242.238.90:<0.25656.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:54.727,ns_1@10.242.238.90:<0.25658.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 396 [ns_server:info,2014-08-19T16:50:54.785,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 651 state to replica [ns_server:info,2014-08-19T16:50:54.792,ns_1@10.242.238.90:<0.25675.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 651 to state replica [ns_server:debug,2014-08-19T16:50:54.828,ns_1@10.242.238.90:<0.25675.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_651_'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:50:54.829,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 654. Nacking mccouch update. [views:debug,2014-08-19T16:50:54.829,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/654. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:54.829,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",654,pending,0} [rebalance:info,2014-08-19T16:50:54.829,ns_1@10.242.238.90:<0.25675.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[651]}, {checkpoints,[{651,0}]}, {name,<<"replication_building_651_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[651]}, {takeover,false}, {suffix,"building_651_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",651,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [ns_server:debug,2014-08-19T16:50:54.829,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,1022,945,762,698,1009, 996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697,1008, 995,748,684,982,735,671,424,969,722,658,411,988,956,741,709,677,1020,975,943, 760,728,696,664,417,1007,994,962,747,715,683,404,981,949,766,734,702,670,423, 1013,968,753,721,689,657,410,1000,987,955,740,708,676,1019,974,942,759,727, 695,663,416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012, 999,967,752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662, 415,1005,992,960,745,713,681,979,947,764,732,700,668,421,1011,998,966,751, 719,687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414,1004,991, 959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718,654,407, 952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,406,951, 704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,405,950,767, 703,1014,754,690,1001] [rebalance:debug,2014-08-19T16:50:54.830,ns_1@10.242.238.90:<0.25675.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25676.0> [rebalance:debug,2014-08-19T16:50:54.830,ns_1@10.242.238.90:<0.25675.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:54.830,ns_1@10.242.238.90:<0.25675.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14869.1>,#Ref<16550.0.1.197493>}]} [rebalance:info,2014-08-19T16:50:54.831,ns_1@10.242.238.90:<0.25675.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 651 [rebalance:debug,2014-08-19T16:50:54.831,ns_1@10.242.238.90:<0.25675.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14869.1>,#Ref<16550.0.1.197493>}] [ns_server:debug,2014-08-19T16:50:54.832,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25677.0> (ok) [ns_server:debug,2014-08-19T16:50:54.832,ns_1@10.242.238.90:<0.25675.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:54.833,ns_1@10.242.238.90:<0.25678.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 651 [ns_server:info,2014-08-19T16:50:54.838,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 395 state to replica [ns_server:info,2014-08-19T16:50:54.842,ns_1@10.242.238.90:<0.25681.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 395 to state replica 
[ns_server:debug,2014-08-19T16:50:54.866,ns_1@10.242.238.90:<0.25681.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_395_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:54.868,ns_1@10.242.238.90:<0.25681.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[395]}, {checkpoints,[{395,0}]}, {name,<<"replication_building_395_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[395]}, {takeover,false}, {suffix,"building_395_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",395,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:54.869,ns_1@10.242.238.90:<0.25681.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25682.0> [rebalance:debug,2014-08-19T16:50:54.869,ns_1@10.242.238.90:<0.25681.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:54.869,ns_1@10.242.238.90:<0.25681.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14891.1>,#Ref<16550.0.1.197610>}]} [rebalance:info,2014-08-19T16:50:54.869,ns_1@10.242.238.90:<0.25681.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 395 [rebalance:debug,2014-08-19T16:50:54.870,ns_1@10.242.238.90:<0.25681.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14891.1>,#Ref<16550.0.1.197610>}] [ns_server:debug,2014-08-19T16:50:54.870,ns_1@10.242.238.90:<0.25681.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:54.888,ns_1@10.242.238.90:<0.25683.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 395 [views:debug,2014-08-19T16:50:54.896,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/654. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:54.897,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",654,pending,0} [ns_server:info,2014-08-19T16:50:54.945,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 650 state to replica [ns_server:info,2014-08-19T16:50:54.951,ns_1@10.242.238.90:<0.25686.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 650 to state replica [ns_server:debug,2014-08-19T16:50:54.988,ns_1@10.242.238.90:<0.25686.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_650_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:54.989,ns_1@10.242.238.90:<0.25686.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[650]}, {checkpoints,[{650,0}]}, {name,<<"replication_building_650_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[650]}, {takeover,false}, {suffix,"building_650_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",650,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:54.990,ns_1@10.242.238.90:<0.25686.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25701.0> [rebalance:debug,2014-08-19T16:50:54.990,ns_1@10.242.238.90:<0.25686.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:54.990,ns_1@10.242.238.90:<0.25686.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14946.1>,#Ref<16550.0.1.197871>}]} [rebalance:info,2014-08-19T16:50:54.991,ns_1@10.242.238.90:<0.25686.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 650 [rebalance:debug,2014-08-19T16:50:54.991,ns_1@10.242.238.90:<0.25686.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14946.1>,#Ref<16550.0.1.197871>}] [ns_server:debug,2014-08-19T16:50:54.992,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25702.0> (ok) [ns_server:debug,2014-08-19T16:50:54.992,ns_1@10.242.238.90:<0.25686.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:54.993,ns_1@10.242.238.90:<0.25703.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 650 [ns_server:info,2014-08-19T16:50:54.998,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 394 state to replica [ns_server:info,2014-08-19T16:50:55.002,ns_1@10.242.238.90:<0.25706.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 394 to state replica [ns_server:debug,2014-08-19T16:50:55.034,ns_1@10.242.238.90:<0.25706.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_394_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:55.036,ns_1@10.242.238.90:<0.25706.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[394]}, {checkpoints,[{394,0}]}, {name,<<"replication_building_394_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[394]}, {takeover,false}, {suffix,"building_394_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",394,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, 
{set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:55.036,ns_1@10.242.238.90:<0.25706.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25707.0> [rebalance:debug,2014-08-19T16:50:55.036,ns_1@10.242.238.90:<0.25706.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:55.037,ns_1@10.242.238.90:<0.25706.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.14968.1>,#Ref<16550.0.1.197990>}]} [rebalance:info,2014-08-19T16:50:55.037,ns_1@10.242.238.90:<0.25706.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 394 [rebalance:debug,2014-08-19T16:50:55.037,ns_1@10.242.238.90:<0.25706.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.14968.1>,#Ref<16550.0.1.197990>}] [ns_server:debug,2014-08-19T16:50:55.038,ns_1@10.242.238.90:<0.25706.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:55.039,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 402. Nacking mccouch update. [views:debug,2014-08-19T16:50:55.039,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/402. Updated state: replica (1) [ns_server:debug,2014-08-19T16:50:55.039,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",402,replica,1} [ns_server:debug,2014-08-19T16:50:55.040,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,1022,945,762,698,1009, 996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697,1008, 995,748,684,982,735,671,424,969,722,658,411,988,956,741,709,677,1020,975,943, 760,728,696,664,417,1007,994,962,747,715,683,404,981,949,766,734,702,670,423, 1013,968,753,721,689,657,410,1000,987,955,740,708,676,1019,974,942,759,727, 695,663,416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012, 999,967,752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662, 415,1005,992,960,745,713,681,402,979,947,764,732,700,668,421,1011,998,966, 751,719,687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414,1004, 991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718,654, 407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,406, 951,704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,405,950, 767,703,1014,754,690,1001] [rebalance:debug,2014-08-19T16:50:55.047,ns_1@10.242.238.90:<0.25708.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 394 [views:debug,2014-08-19T16:50:55.106,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/402. 
Updated state: replica (1) [ns_server:debug,2014-08-19T16:50:55.106,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",402,replica,1} [ns_server:info,2014-08-19T16:50:55.110,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 649 state to replica [ns_server:info,2014-08-19T16:50:55.118,ns_1@10.242.238.90:<0.25711.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 649 to state replica [ns_server:debug,2014-08-19T16:50:55.158,ns_1@10.242.238.90:<0.25711.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_649_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:55.160,ns_1@10.242.238.90:<0.25711.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[649]}, {checkpoints,[{649,0}]}, {name,<<"replication_building_649_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[649]}, {takeover,false}, {suffix,"building_649_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",649,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:55.160,ns_1@10.242.238.90:<0.25711.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25712.0> [rebalance:debug,2014-08-19T16:50:55.161,ns_1@10.242.238.90:<0.25711.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:55.161,ns_1@10.242.238.90:<0.25711.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15028.1>,#Ref<16550.0.1.198314>}]} [rebalance:info,2014-08-19T16:50:55.161,ns_1@10.242.238.90:<0.25711.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 649 [rebalance:debug,2014-08-19T16:50:55.162,ns_1@10.242.238.90:<0.25711.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15028.1>,#Ref<16550.0.1.198314>}] [ns_server:debug,2014-08-19T16:50:55.162,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25713.0> (ok) [ns_server:debug,2014-08-19T16:50:55.162,ns_1@10.242.238.90:<0.25711.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:55.163,ns_1@10.242.238.90:<0.25714.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 649 [ns_server:info,2014-08-19T16:50:55.169,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 393 state to replica [ns_server:info,2014-08-19T16:50:55.172,ns_1@10.242.238.90:<0.25717.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 393 to state replica [ns_server:debug,2014-08-19T16:50:55.198,ns_1@10.242.238.90:<0.25717.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_393_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:55.199,ns_1@10.242.238.90:<0.25717.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[393]}, {checkpoints,[{393,0}]}, {name,<<"replication_building_393_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[393]}, {takeover,false}, {suffix,"building_393_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",393,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, 
{set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:55.200,ns_1@10.242.238.90:<0.25717.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25732.0> [rebalance:debug,2014-08-19T16:50:55.200,ns_1@10.242.238.90:<0.25717.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:55.201,ns_1@10.242.238.90:<0.25717.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15064.1>,#Ref<16550.0.1.198459>}]} [rebalance:info,2014-08-19T16:50:55.201,ns_1@10.242.238.90:<0.25717.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 393 [rebalance:debug,2014-08-19T16:50:55.201,ns_1@10.242.238.90:<0.25717.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15064.1>,#Ref<16550.0.1.198459>}] [ns_server:debug,2014-08-19T16:50:55.202,ns_1@10.242.238.90:<0.25717.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:55.215,ns_1@10.242.238.90:<0.25733.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 393 [ns_server:debug,2014-08-19T16:50:55.256,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 400. Nacking mccouch update. [views:debug,2014-08-19T16:50:55.256,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/400. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:55.257,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",400,replica,0} [ns_server:debug,2014-08-19T16:50:55.257,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697, 1008,995,748,684,982,735,671,424,969,722,658,411,956,709,1020,975,943,760, 728,696,664,417,1007,994,962,747,715,683,404,981,949,766,734,702,670,423, 1013,968,753,721,689,657,410,1000,987,955,740,708,676,1019,974,942,759,727, 695,663,416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012, 999,967,752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662, 415,1005,992,960,745,713,681,402,979,947,764,732,700,668,421,1011,998,966, 751,719,687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414,1004, 991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718,654, 407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,406, 951,704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,405,950, 767,703,1014,754,690,1001,988,741,677] [ns_server:info,2014-08-19T16:50:55.273,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 648 state to replica [ns_server:info,2014-08-19T16:50:55.280,ns_1@10.242.238.90:<0.25736.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 648 to state replica [ns_server:debug,2014-08-19T16:50:55.316,ns_1@10.242.238.90:<0.25736.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_648_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:55.317,ns_1@10.242.238.90:<0.25736.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[648]}, {checkpoints,[{648,0}]}, {name,<<"replication_building_648_'ns_1@10.242.238.90'">>}, {takeover,false}] 
{{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[648]}, {takeover,false}, {suffix,"building_648_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",648,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:55.318,ns_1@10.242.238.90:<0.25736.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25737.0> [rebalance:debug,2014-08-19T16:50:55.318,ns_1@10.242.238.90:<0.25736.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:55.318,ns_1@10.242.238.90:<0.25736.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15110.1>,#Ref<16550.0.1.198698>}]} [rebalance:info,2014-08-19T16:50:55.319,ns_1@10.242.238.90:<0.25736.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 648 [rebalance:debug,2014-08-19T16:50:55.319,ns_1@10.242.238.90:<0.25736.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15110.1>,#Ref<16550.0.1.198698>}] [ns_server:debug,2014-08-19T16:50:55.320,ns_1@10.242.238.90:<0.25736.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:55.320,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25738.0> (ok) [rebalance:debug,2014-08-19T16:50:55.321,ns_1@10.242.238.90:<0.25739.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 648 [ns_server:info,2014-08-19T16:50:55.326,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 392 state to replica [ns_server:info,2014-08-19T16:50:55.330,ns_1@10.242.238.90:<0.25742.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 392 to state replica [views:debug,2014-08-19T16:50:55.332,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/400. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:55.332,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",400,replica,0} [ns_server:debug,2014-08-19T16:50:55.354,ns_1@10.242.238.90:<0.25742.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_392_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:55.356,ns_1@10.242.238.90:<0.25742.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[392]}, {checkpoints,[{392,0}]}, {name,<<"replication_building_392_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[392]}, {takeover,false}, {suffix,"building_392_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",392,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:55.356,ns_1@10.242.238.90:<0.25742.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25743.0> [rebalance:debug,2014-08-19T16:50:55.356,ns_1@10.242.238.90:<0.25742.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [ns_server:info,2014-08-19T16:50:55.357,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:50:55.357,ns_1@10.242.238.90:<0.25742.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15141.1>,#Ref<16550.0.1.198862>}]} [rebalance:info,2014-08-19T16:50:55.357,ns_1@10.242.238.90:<0.25742.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 392 [rebalance:debug,2014-08-19T16:50:55.357,ns_1@10.242.238.90:<0.25742.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15141.1>,#Ref<16550.0.1.198862>}] [ns_server:debug,2014-08-19T16:50:55.358,ns_1@10.242.238.90:<0.25742.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:55.373,ns_1@10.242.238.90:<0.25752.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 392 [ns_server:info,2014-08-19T16:50:55.429,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 647 state to replica [ns_server:info,2014-08-19T16:50:55.435,ns_1@10.242.238.90:<0.25769.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 647 to state replica [ns_server:debug,2014-08-19T16:50:55.471,ns_1@10.242.238.90:<0.25769.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_647_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:55.472,ns_1@10.242.238.90:<0.25769.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[647]}, {checkpoints,[{647,0}]}, {name,<<"replication_building_647_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[647]}, {takeover,false}, {suffix,"building_647_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",647,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:55.473,ns_1@10.242.238.90:<0.25769.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25770.0> [rebalance:debug,2014-08-19T16:50:55.473,ns_1@10.242.238.90:<0.25769.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
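Each vbucket build also leaves a clean pair of timestamps: ebucketmigrator's "Setting {...} vbucket N to state replica" entry when the stream is set up, and janitor_agent's "Going to wait for persistence of checkpoint 1 in vbucket N" entry once the backfill has closed. A sketch of turning those into per-vbucket build times; the regexes are again fitted to the entries above rather than to any documented ns_server format:

import re
from datetime import datetime

SET_RE = re.compile(
    r"\[ns_server:info,([\d\-T:.]+),[^\]]+\]"
    r"Setting \{[^}]+\} vbucket (\d+) to state replica")
WAIT_RE = re.compile(
    r"\[rebalance:debug,([\d\-T:.]+),[^\]]+\]"
    r"Going to wait for persistence of checkpoint \d+ in vbucket (\d+)")

def parse_ts(ts):
    return datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S.%f")

def build_times(text):
    # Seconds from the replica state being set to the start of the
    # checkpoint-persistence wait, keyed by vbucket.
    started = {int(vb): parse_ts(ts) for ts, vb in SET_RE.findall(text)}
    waited = {int(vb): parse_ts(ts) for ts, vb in WAIT_RE.findall(text)}
    return {vb: (waited[vb] - started[vb]).total_seconds()
            for vb in started if vb in waited}

For vbucket 392 above that is 16:50:55.330 to 16:50:55.373, roughly 43 ms.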
[rebalance:debug,2014-08-19T16:50:55.474,ns_1@10.242.238.90:<0.25769.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15199.1>,#Ref<16550.0.1.199141>}]} [rebalance:info,2014-08-19T16:50:55.474,ns_1@10.242.238.90:<0.25769.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 647 [rebalance:debug,2014-08-19T16:50:55.474,ns_1@10.242.238.90:<0.25769.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15199.1>,#Ref<16550.0.1.199141>}] [ns_server:debug,2014-08-19T16:50:55.475,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25771.0> (ok) [ns_server:debug,2014-08-19T16:50:55.475,ns_1@10.242.238.90:<0.25769.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:55.476,ns_1@10.242.238.90:<0.25772.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 647 [ns_server:info,2014-08-19T16:50:55.481,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 391 state to replica [ns_server:info,2014-08-19T16:50:55.484,ns_1@10.242.238.90:<0.25775.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 391 to state replica [ns_server:debug,2014-08-19T16:50:55.510,ns_1@10.242.238.90:<0.25775.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_391_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:55.511,ns_1@10.242.238.90:<0.25775.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[391]}, {checkpoints,[{391,0}]}, {name,<<"replication_building_391_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[391]}, {takeover,false}, {suffix,"building_391_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",391,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:55.512,ns_1@10.242.238.90:<0.25775.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25776.0> [rebalance:debug,2014-08-19T16:50:55.512,ns_1@10.242.238.90:<0.25775.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:55.513,ns_1@10.242.238.90:<0.25775.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15221.1>,#Ref<16550.0.1.199259>}]} [rebalance:info,2014-08-19T16:50:55.513,ns_1@10.242.238.90:<0.25775.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 391 [rebalance:debug,2014-08-19T16:50:55.513,ns_1@10.242.238.90:<0.25775.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15221.1>,#Ref<16550.0.1.199259>}] [ns_server:debug,2014-08-19T16:50:55.514,ns_1@10.242.238.90:<0.25775.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:55.515,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 398. Nacking mccouch update. [views:debug,2014-08-19T16:50:55.516,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/398. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:55.516,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",398,replica,0} [ns_server:debug,2014-08-19T16:50:55.516,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697, 1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,975,943, 760,728,696,664,417,1007,994,962,747,715,683,404,981,949,766,734,702,670,423, 1013,968,753,721,689,657,410,1000,987,955,740,708,676,1019,974,942,759,727, 695,663,416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012, 999,967,752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662, 415,1005,992,960,745,713,681,402,979,947,764,732,700,668,421,1011,998,966, 751,719,687,408,985,953,738,706,674,1017,972,940,757,725,693,661,414,1004, 991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718,654, 407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,406, 951,704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,405,950, 767,703,1014,754,690,1001,988,741,677] [rebalance:debug,2014-08-19T16:50:55.528,ns_1@10.242.238.90:<0.25777.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 391 [ns_server:info,2014-08-19T16:50:55.590,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 646 state to replica [ns_server:info,2014-08-19T16:50:55.597,ns_1@10.242.238.90:<0.25780.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 646 to state replica [views:debug,2014-08-19T16:50:55.600,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/398. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:55.601,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",398,replica,0} [ns_server:debug,2014-08-19T16:50:55.631,ns_1@10.242.238.90:<0.25780.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_646_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:55.633,ns_1@10.242.238.90:<0.25780.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[646]}, {checkpoints,[{646,0}]}, {name,<<"replication_building_646_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[646]}, {takeover,false}, {suffix,"building_646_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",646,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:55.634,ns_1@10.242.238.90:<0.25780.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25781.0> [rebalance:debug,2014-08-19T16:50:55.634,ns_1@10.242.238.90:<0.25780.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:55.634,ns_1@10.242.238.90:<0.25780.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15270.1>,#Ref<16550.0.1.199506>}]} [rebalance:info,2014-08-19T16:50:55.635,ns_1@10.242.238.90:<0.25780.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 646 [rebalance:debug,2014-08-19T16:50:55.635,ns_1@10.242.238.90:<0.25780.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15270.1>,#Ref<16550.0.1.199506>}] [ns_server:debug,2014-08-19T16:50:55.635,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25782.0> (ok) [ns_server:debug,2014-08-19T16:50:55.636,ns_1@10.242.238.90:<0.25780.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:55.637,ns_1@10.242.238.90:<0.25783.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 646 [ns_server:info,2014-08-19T16:50:55.642,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 390 state to replica [ns_server:info,2014-08-19T16:50:55.645,ns_1@10.242.238.90:<0.25786.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 390 to state replica [ns_server:debug,2014-08-19T16:50:55.670,ns_1@10.242.238.90:<0.25786.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_390_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:55.671,ns_1@10.242.238.90:<0.25786.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[390]}, {checkpoints,[{390,0}]}, {name,<<"replication_building_390_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[390]}, {takeover,false}, {suffix,"building_390_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",390,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:55.672,ns_1@10.242.238.90:<0.25786.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25787.0> [rebalance:debug,2014-08-19T16:50:55.672,ns_1@10.242.238.90:<0.25786.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
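The mc_couch_events entries ("Got set_vbucket event for default/N. Updated state: ...") record what the view manager saw for each state change, and several are signaled twice when the mccouch update is nacked, as with default/398 just above. A hedged sketch that folds them into a last-known-state map per vbucket, which also collapses those duplicates:

import re

EVENT_RE = re.compile(
    r"Got set_vbucket event for ([\w-]+)/(\d+)\.\s*"
    r"Updated state: (\w+) \((\d+)\)")

def last_view_states(text):
    # Later events simply overwrite earlier ones for the same vbucket.
    states = {}
    for bucket, vb, state, flag in EVENT_RE.findall(text):
        states[(bucket, int(vb))] = (state, int(flag))
    return states

Run over the excerpt so far, the 65x vbuckets come back pending and the 398-403 range comes back replica, which lines up with the set_to_pending_state options in the corresponding tap-stream blocks.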
[rebalance:debug,2014-08-19T16:50:55.673,ns_1@10.242.238.90:<0.25786.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15298.1>,#Ref<16550.0.1.199620>}]} [rebalance:info,2014-08-19T16:50:55.673,ns_1@10.242.238.90:<0.25786.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 390 [rebalance:debug,2014-08-19T16:50:55.673,ns_1@10.242.238.90:<0.25786.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15298.1>,#Ref<16550.0.1.199620>}] [ns_server:debug,2014-08-19T16:50:55.674,ns_1@10.242.238.90:<0.25786.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:55.688,ns_1@10.242.238.90:<0.25802.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 390 [ns_server:debug,2014-08-19T16:50:55.727,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 655. Nacking mccouch update. [views:debug,2014-08-19T16:50:55.727,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/655. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:55.727,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",655,pending,0} [ns_server:debug,2014-08-19T16:50:55.728,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697, 1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,975,943, 760,728,696,664,417,1007,994,962,747,715,683,404,981,949,766,734,702,670,423, 1013,968,753,721,689,657,410,1000,987,955,740,708,676,1019,974,942,759,727, 695,663,416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012, 999,967,752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662, 415,1005,992,960,745,713,681,402,979,947,764,732,700,668,421,1011,998,966, 751,719,687,655,408,985,953,738,706,674,1017,972,940,757,725,693,661,414, 1004,991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718, 654,407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717, 406,951,704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,405, 950,767,703,1014,754,690,1001,988,741,677] [ns_server:info,2014-08-19T16:50:55.749,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 645 state to replica [ns_server:info,2014-08-19T16:50:55.757,ns_1@10.242.238.90:<0.25805.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 645 to state replica [views:debug,2014-08-19T16:50:55.761,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/655. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:55.761,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",655,pending,0} [ns_server:debug,2014-08-19T16:50:55.792,ns_1@10.242.238.90:<0.25805.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_645_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:55.794,ns_1@10.242.238.90:<0.25805.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[645]}, {checkpoints,[{645,0}]}, {name,<<"replication_building_645_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[645]}, {takeover,false}, {suffix,"building_645_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",645,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:55.795,ns_1@10.242.238.90:<0.25805.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25806.0> [rebalance:debug,2014-08-19T16:50:55.795,ns_1@10.242.238.90:<0.25805.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:55.796,ns_1@10.242.238.90:<0.25805.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15339.1>,#Ref<16550.0.1.199832>}]} [rebalance:info,2014-08-19T16:50:55.796,ns_1@10.242.238.90:<0.25805.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 645 [rebalance:debug,2014-08-19T16:50:55.796,ns_1@10.242.238.90:<0.25805.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15339.1>,#Ref<16550.0.1.199832>}] [ns_server:debug,2014-08-19T16:50:55.797,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25813.0> (ok) [ns_server:debug,2014-08-19T16:50:55.797,ns_1@10.242.238.90:<0.25805.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:55.798,ns_1@10.242.238.90:<0.25819.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 645 [ns_server:info,2014-08-19T16:50:55.803,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 389 state to replica [ns_server:info,2014-08-19T16:50:55.807,ns_1@10.242.238.90:<0.25825.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 389 to state replica [ns_server:debug,2014-08-19T16:50:55.828,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 653. Nacking mccouch update. [views:debug,2014-08-19T16:50:55.828,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/653. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:55.828,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",653,pending,0} [ns_server:debug,2014-08-19T16:50:55.830,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697, 1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,975,943, 760,728,696,664,417,1007,994,962,747,715,683,404,981,949,766,734,702,670,423, 1013,968,753,721,689,657,410,1000,987,955,740,708,676,1019,974,942,759,727, 695,663,416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012, 999,967,752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662, 415,1005,992,960,745,713,681,402,979,947,764,732,700,668,421,1011,998,966, 751,719,687,655,408,985,953,738,706,674,1017,972,940,757,725,693,661,414, 1004,991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718, 654,407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717, 653,406,951,704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 405,950,767,703,1014,754,690,1001,988,741,677] [ns_server:debug,2014-08-19T16:50:55.832,ns_1@10.242.238.90:<0.25825.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_389_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:55.837,ns_1@10.242.238.90:<0.25825.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[389]}, {checkpoints,[{389,0}]}, {name,<<"replication_building_389_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[389]}, {takeover,false}, {suffix,"building_389_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",389,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:55.838,ns_1@10.242.238.90:<0.25825.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25826.0> [rebalance:debug,2014-08-19T16:50:55.838,ns_1@10.242.238.90:<0.25825.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:55.839,ns_1@10.242.238.90:<0.25825.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15361.1>,#Ref<16550.0.1.199950>}]} [rebalance:info,2014-08-19T16:50:55.839,ns_1@10.242.238.90:<0.25825.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 389 [rebalance:debug,2014-08-19T16:50:55.839,ns_1@10.242.238.90:<0.25825.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15361.1>,#Ref<16550.0.1.199950>}] [ns_server:debug,2014-08-19T16:50:55.840,ns_1@10.242.238.90:<0.25825.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:55.850,ns_1@10.242.238.90:<0.25827.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 389 [views:debug,2014-08-19T16:50:55.862,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/653. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:55.862,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",653,pending,0} [ns_server:info,2014-08-19T16:50:55.906,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 644 state to replica [ns_server:info,2014-08-19T16:50:55.912,ns_1@10.242.238.90:<0.25830.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 644 to state replica [ns_server:debug,2014-08-19T16:50:55.947,ns_1@10.242.238.90:<0.25830.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_644_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:55.949,ns_1@10.242.238.90:<0.25830.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[644]}, {checkpoints,[{644,0}]}, {name,<<"replication_building_644_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[644]}, {takeover,false}, {suffix,"building_644_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",644,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:55.949,ns_1@10.242.238.90:<0.25830.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25845.0> [rebalance:debug,2014-08-19T16:50:55.950,ns_1@10.242.238.90:<0.25830.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:55.950,ns_1@10.242.238.90:<0.25830.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15416.1>,#Ref<16550.0.1.200190>}]} [rebalance:info,2014-08-19T16:50:55.950,ns_1@10.242.238.90:<0.25830.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 644 [rebalance:debug,2014-08-19T16:50:55.950,ns_1@10.242.238.90:<0.25830.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15416.1>,#Ref<16550.0.1.200190>}] [ns_server:debug,2014-08-19T16:50:55.951,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25846.0> (ok) [ns_server:debug,2014-08-19T16:50:55.951,ns_1@10.242.238.90:<0.25830.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:55.953,ns_1@10.242.238.90:<0.25847.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 644 [ns_server:debug,2014-08-19T16:50:55.954,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 651. Nacking mccouch update. [views:debug,2014-08-19T16:50:55.954,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/651. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:55.954,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",651,pending,0} [ns_server:debug,2014-08-19T16:50:55.954,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697, 1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,975,943, 760,728,696,664,417,1007,994,962,747,715,683,651,404,981,949,766,734,702,670, 423,1013,968,753,721,689,657,410,1000,987,955,740,708,676,1019,974,942,759, 727,695,663,416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422, 1012,999,967,752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726, 694,662,415,1005,992,960,745,713,681,402,979,947,764,732,700,668,421,1011, 998,966,751,719,687,655,408,985,953,738,706,674,1017,972,940,757,725,693,661, 414,1004,991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965, 718,654,407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964, 717,653,406,951,704,1015,938,755,691,1002,989,742,678,976,729,665,418,963, 716,405,950,767,703,1014,754,690,1001,988,741,677] [ns_server:info,2014-08-19T16:50:55.957,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 388 state to replica [ns_server:info,2014-08-19T16:50:55.962,ns_1@10.242.238.90:<0.25850.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 388 to state replica [ns_server:debug,2014-08-19T16:50:55.987,ns_1@10.242.238.90:<0.25850.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_388_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:55.989,ns_1@10.242.238.90:<0.25850.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[388]}, {checkpoints,[{388,0}]}, {name,<<"replication_building_388_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[388]}, {takeover,false}, {suffix,"building_388_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",388,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:55.989,ns_1@10.242.238.90:<0.25850.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25851.0> [rebalance:debug,2014-08-19T16:50:55.990,ns_1@10.242.238.90:<0.25850.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:55.990,ns_1@10.242.238.90:<0.25850.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15438.1>,#Ref<16550.0.1.200307>}]} [rebalance:info,2014-08-19T16:50:55.990,ns_1@10.242.238.90:<0.25850.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 388 [rebalance:debug,2014-08-19T16:50:55.991,ns_1@10.242.238.90:<0.25850.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15438.1>,#Ref<16550.0.1.200307>}] [ns_server:debug,2014-08-19T16:50:55.992,ns_1@10.242.238.90:<0.25850.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:50:55.997,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/651. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:55.997,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",651,pending,0} [rebalance:debug,2014-08-19T16:50:56.005,ns_1@10.242.238.90:<0.25852.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 388 [ns_server:info,2014-08-19T16:50:56.061,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 643 state to replica [ns_server:info,2014-08-19T16:50:56.068,ns_1@10.242.238.90:<0.25869.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 643 to state replica [ns_server:debug,2014-08-19T16:50:56.071,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 649. Nacking mccouch update. [views:debug,2014-08-19T16:50:56.071,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/649. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:56.071,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",649,pending,0} [ns_server:debug,2014-08-19T16:50:56.072,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697, 1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,943,760, 696,1007,994,962,747,715,683,651,404,981,949,766,734,702,670,423,1013,968, 753,721,689,657,410,1000,987,955,740,708,676,1019,974,942,759,727,695,663, 416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012,999,967, 752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662,415, 1005,992,960,745,713,681,649,402,979,947,764,732,700,668,421,1011,998,966, 751,719,687,655,408,985,953,738,706,674,1017,972,940,757,725,693,661,414, 1004,991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718, 654,407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717, 653,406,951,704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 405,950,767,703,1014,754,690,1001,988,741,677,975,728,664,417] [ns_server:debug,2014-08-19T16:50:56.103,ns_1@10.242.238.90:<0.25869.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_643_'ns_1@10.242.238.90' [views:debug,2014-08-19T16:50:56.105,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/649. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:56.105,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",649,pending,0} [rebalance:info,2014-08-19T16:50:56.110,ns_1@10.242.238.90:<0.25869.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[643]}, {checkpoints,[{643,0}]}, {name,<<"replication_building_643_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[643]}, {takeover,false}, {suffix,"building_643_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",643,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:56.111,ns_1@10.242.238.90:<0.25869.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25870.0> [rebalance:debug,2014-08-19T16:50:56.111,ns_1@10.242.238.90:<0.25869.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:56.112,ns_1@10.242.238.90:<0.25869.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15487.1>,#Ref<16550.0.1.200550>}]} [rebalance:info,2014-08-19T16:50:56.112,ns_1@10.242.238.90:<0.25869.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 643 [rebalance:debug,2014-08-19T16:50:56.112,ns_1@10.242.238.90:<0.25869.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15487.1>,#Ref<16550.0.1.200550>}] [ns_server:debug,2014-08-19T16:50:56.113,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25871.0> (ok) [ns_server:debug,2014-08-19T16:50:56.113,ns_1@10.242.238.90:<0.25869.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:56.114,ns_1@10.242.238.90:<0.25872.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 643 [ns_server:info,2014-08-19T16:50:56.151,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 387 state to replica [ns_server:info,2014-08-19T16:50:56.155,ns_1@10.242.238.90:<0.25875.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 387 to state replica [ns_server:debug,2014-08-19T16:50:56.180,ns_1@10.242.238.90:<0.25875.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_387_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:56.182,ns_1@10.242.238.90:<0.25875.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[387]}, {checkpoints,[{387,0}]}, {name,<<"replication_building_387_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[387]}, {takeover,false}, {suffix,"building_387_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",387,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:56.183,ns_1@10.242.238.90:<0.25875.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25890.0> [rebalance:debug,2014-08-19T16:50:56.183,ns_1@10.242.238.90:<0.25875.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:56.183,ns_1@10.242.238.90:<0.25875.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter 
{had_backfill,undefined,undefined,[{<16550.15515.1>,#Ref<16550.0.1.200700>}]} [rebalance:info,2014-08-19T16:50:56.184,ns_1@10.242.238.90:<0.25875.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 387 [rebalance:debug,2014-08-19T16:50:56.184,ns_1@10.242.238.90:<0.25875.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15515.1>,#Ref<16550.0.1.200700>}] [ns_server:debug,2014-08-19T16:50:56.185,ns_1@10.242.238.90:<0.25875.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:56.190,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 647. Nacking mccouch update. [views:debug,2014-08-19T16:50:56.190,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/647. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:56.190,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",647,pending,0} [ns_server:debug,2014-08-19T16:50:56.191,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,1021,944,761,697, 1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,943,760, 696,1007,994,962,747,715,683,651,404,981,949,766,734,702,670,423,1013,968, 753,721,689,657,410,1000,987,955,740,708,676,1019,974,942,759,727,695,663, 416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012,999,967, 752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662,415, 1005,992,960,745,713,681,649,402,979,947,764,732,700,668,421,1011,998,966, 751,719,687,655,408,985,953,738,706,674,1017,972,940,757,725,693,661,414, 1004,991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718, 654,407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717, 653,406,951,704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 405,950,767,703,1014,754,690,1001,988,741,677,975,728,664,417] [rebalance:debug,2014-08-19T16:50:56.198,ns_1@10.242.238.90:<0.25891.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 387 [ns_server:info,2014-08-19T16:50:56.255,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 642 state to replica [ns_server:info,2014-08-19T16:50:56.261,ns_1@10.242.238.90:<0.25894.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 642 to state replica [views:debug,2014-08-19T16:50:56.267,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/647. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:56.267,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",647,pending,0} [ns_server:debug,2014-08-19T16:50:56.297,ns_1@10.242.238.90:<0.25894.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_642_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:56.298,ns_1@10.242.238.90:<0.25894.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[642]}, {checkpoints,[{642,0}]}, {name,<<"replication_building_642_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[642]}, {takeover,false}, {suffix,"building_642_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",642,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:56.299,ns_1@10.242.238.90:<0.25894.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25895.0> [rebalance:debug,2014-08-19T16:50:56.299,ns_1@10.242.238.90:<0.25894.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:56.299,ns_1@10.242.238.90:<0.25894.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15556.1>,#Ref<16550.0.1.200911>}]} [rebalance:info,2014-08-19T16:50:56.300,ns_1@10.242.238.90:<0.25894.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 642 [rebalance:debug,2014-08-19T16:50:56.300,ns_1@10.242.238.90:<0.25894.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15556.1>,#Ref<16550.0.1.200911>}] [ns_server:debug,2014-08-19T16:50:56.300,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25896.0> (ok) [ns_server:debug,2014-08-19T16:50:56.301,ns_1@10.242.238.90:<0.25894.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:56.302,ns_1@10.242.238.90:<0.25897.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 642 [ns_server:info,2014-08-19T16:50:56.306,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 386 state to replica [ns_server:info,2014-08-19T16:50:56.311,ns_1@10.242.238.90:<0.25900.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 386 to state replica [ns_server:debug,2014-08-19T16:50:56.336,ns_1@10.242.238.90:<0.25900.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_386_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:56.337,ns_1@10.242.238.90:<0.25900.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[386]}, {checkpoints,[{386,0}]}, {name,<<"replication_building_386_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[386]}, {takeover,false}, {suffix,"building_386_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",386,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:56.338,ns_1@10.242.238.90:<0.25900.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25901.0> [rebalance:debug,2014-08-19T16:50:56.338,ns_1@10.242.238.90:<0.25900.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:50:56.339,ns_1@10.242.238.90:<0.25900.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15603.1>,#Ref<16550.0.1.201112>}]} [rebalance:info,2014-08-19T16:50:56.339,ns_1@10.242.238.90:<0.25900.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 386 [rebalance:debug,2014-08-19T16:50:56.339,ns_1@10.242.238.90:<0.25900.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15603.1>,#Ref<16550.0.1.201112>}] [ns_server:debug,2014-08-19T16:50:56.340,ns_1@10.242.238.90:<0.25900.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:56.356,ns_1@10.242.238.90:<0.25916.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 386 [ns_server:info,2014-08-19T16:50:56.361,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 641 state to replica [ns_server:info,2014-08-19T16:50:56.366,ns_1@10.242.238.90:<0.25919.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 641 to state replica [ns_server:debug,2014-08-19T16:50:56.404,ns_1@10.242.238.90:<0.25919.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_641_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:56.406,ns_1@10.242.238.90:<0.25919.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[641]}, {checkpoints,[{641,0}]}, {name,<<"replication_building_641_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[641]}, {takeover,false}, {suffix,"building_641_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",641,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:56.407,ns_1@10.242.238.90:<0.25919.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25920.0> [rebalance:debug,2014-08-19T16:50:56.407,ns_1@10.242.238.90:<0.25919.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:56.408,ns_1@10.242.238.90:<0.25919.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15626.1>,#Ref<16550.0.1.201271>}]} [rebalance:info,2014-08-19T16:50:56.408,ns_1@10.242.238.90:<0.25919.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 641 [rebalance:debug,2014-08-19T16:50:56.408,ns_1@10.242.238.90:<0.25919.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15626.1>,#Ref<16550.0.1.201271>}] [ns_server:debug,2014-08-19T16:50:56.409,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25921.0> (ok) [ns_server:debug,2014-08-19T16:50:56.409,ns_1@10.242.238.90:<0.25919.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:56.410,ns_1@10.242.238.90:<0.25922.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 641 [ns_server:info,2014-08-19T16:50:56.414,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 385 state to replica [ns_server:info,2014-08-19T16:50:56.418,ns_1@10.242.238.90:<0.25925.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 385 to state replica 
[ns_server:debug,2014-08-19T16:50:56.443,ns_1@10.242.238.90:<0.25925.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_385_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:56.444,ns_1@10.242.238.90:<0.25925.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[385]}, {checkpoints,[{385,0}]}, {name,<<"replication_building_385_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[385]}, {takeover,false}, {suffix,"building_385_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",385,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:56.445,ns_1@10.242.238.90:<0.25925.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25926.0> [rebalance:debug,2014-08-19T16:50:56.445,ns_1@10.242.238.90:<0.25925.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:56.446,ns_1@10.242.238.90:<0.25925.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15648.1>,#Ref<16550.0.1.201387>}]} [rebalance:info,2014-08-19T16:50:56.446,ns_1@10.242.238.90:<0.25925.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 385 [rebalance:debug,2014-08-19T16:50:56.446,ns_1@10.242.238.90:<0.25925.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15648.1>,#Ref<16550.0.1.201387>}] [ns_server:debug,2014-08-19T16:50:56.447,ns_1@10.242.238.90:<0.25925.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:50:56.450,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 399. Nacking mccouch update. [views:debug,2014-08-19T16:50:56.450,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/399. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:56.451,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",399,replica,0} [ns_server:debug,2014-08-19T16:50:56.451,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,399,1021,944,761, 697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,943, 760,696,1007,994,962,747,715,683,651,404,981,949,766,734,702,670,423,1013, 968,753,721,689,657,410,1000,987,955,740,708,676,1019,974,942,759,727,695, 663,416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012,999, 967,752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662,415, 1005,992,960,745,713,681,649,402,979,947,764,732,700,668,421,1011,998,966, 751,719,687,655,408,985,953,738,706,674,1017,972,940,757,725,693,661,414, 1004,991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718, 654,407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717, 653,406,951,704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 405,950,767,703,1014,754,690,1001,988,741,677,975,728,664,417] [rebalance:debug,2014-08-19T16:50:56.463,ns_1@10.242.238.90:<0.25927.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 385 [ns_server:info,2014-08-19T16:50:56.467,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 640 state to replica [ns_server:info,2014-08-19T16:50:56.473,ns_1@10.242.238.90:<0.25930.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 640 to state replica [ns_server:debug,2014-08-19T16:50:56.509,ns_1@10.242.238.90:<0.25930.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_640_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:56.511,ns_1@10.242.238.90:<0.25930.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[640]}, {checkpoints,[{640,0}]}, {name,<<"replication_building_640_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[640]}, {takeover,false}, {suffix,"building_640_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",640,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:50:56.511,ns_1@10.242.238.90:<0.25930.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25931.0> [rebalance:debug,2014-08-19T16:50:56.512,ns_1@10.242.238.90:<0.25930.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:56.512,ns_1@10.242.238.90:<0.25930.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15668.1>,#Ref<16550.0.1.201507>}]} [rebalance:info,2014-08-19T16:50:56.512,ns_1@10.242.238.90:<0.25930.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 640 [rebalance:debug,2014-08-19T16:50:56.512,ns_1@10.242.238.90:<0.25930.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15668.1>,#Ref<16550.0.1.201507>}] [ns_server:debug,2014-08-19T16:50:56.513,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25932.0> (ok) 
[ns_server:debug,2014-08-19T16:50:56.513,ns_1@10.242.238.90:<0.25930.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:56.515,ns_1@10.242.238.90:<0.25933.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 640 [views:debug,2014-08-19T16:50:56.518,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/399. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:56.518,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",399,replica,0} [ns_server:info,2014-08-19T16:50:56.518,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 384 state to replica [ns_server:info,2014-08-19T16:50:56.522,ns_1@10.242.238.90:<0.25936.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 384 to state replica [ns_server:debug,2014-08-19T16:50:56.547,ns_1@10.242.238.90:<0.25936.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_384_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:56.549,ns_1@10.242.238.90:<0.25936.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[384]}, {checkpoints,[{384,0}]}, {name,<<"replication_building_384_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[384]}, {takeover,false}, {suffix,"building_384_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",384,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:50:56.550,ns_1@10.242.238.90:<0.25936.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.25937.0> [rebalance:debug,2014-08-19T16:50:56.550,ns_1@10.242.238.90:<0.25936.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:50:56.550,ns_1@10.242.238.90:<0.25936.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.15691.1>,#Ref<16550.0.1.201609>}]} [rebalance:info,2014-08-19T16:50:56.550,ns_1@10.242.238.90:<0.25936.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 384 [rebalance:debug,2014-08-19T16:50:56.551,ns_1@10.242.238.90:<0.25936.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.15691.1>,#Ref<16550.0.1.201609>}] [ns_server:debug,2014-08-19T16:50:56.552,ns_1@10.242.238.90:<0.25936.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:50:56.566,ns_1@10.242.238.90:<0.25938.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 384 [ns_server:debug,2014-08-19T16:50:56.668,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 397. Nacking mccouch update. [views:debug,2014-08-19T16:50:56.668,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/397. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:56.668,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",397,replica,0} [ns_server:debug,2014-08-19T16:50:56.669,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,399,1021,944,761, 697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,943, 760,696,1007,994,962,747,715,683,651,404,981,949,766,734,702,670,423,1013, 968,753,721,689,657,410,1000,987,955,740,708,676,397,1019,974,942,759,727, 695,663,416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012, 999,967,752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662, 415,1005,992,960,745,713,681,649,402,979,947,764,732,700,668,421,1011,998, 966,751,719,687,655,408,985,953,738,706,674,1017,972,940,757,725,693,661,414, 1004,991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718, 654,407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717, 653,406,951,704,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 405,950,767,703,1014,754,690,1001,988,741,677,975,728,664,417] [views:debug,2014-08-19T16:50:56.744,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/397. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:56.744,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",397,replica,0} [ns_server:debug,2014-08-19T16:50:56.885,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 395. Nacking mccouch update. [views:debug,2014-08-19T16:50:56.886,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/395. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:56.886,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",395,replica,0} [ns_server:debug,2014-08-19T16:50:56.886,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,399,1021,944,761, 697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,943, 760,696,1007,994,962,747,715,683,651,404,981,949,766,734,702,670,423,1013, 968,753,721,689,657,410,1000,987,955,740,708,676,397,1019,974,942,759,727, 695,663,416,1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012, 999,967,752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662, 415,1005,992,960,745,713,681,649,402,979,947,764,732,700,668,421,1011,998, 966,751,719,687,655,408,985,953,738,706,674,395,1017,972,940,757,725,693,661, 414,1004,991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965, 718,654,407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964, 717,653,406,951,704,1015,938,755,691,1002,989,742,678,976,729,665,418,963, 716,405,950,767,703,1014,754,690,1001,988,741,677,975,728,664,417] [views:debug,2014-08-19T16:50:56.953,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/395. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:56.953,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",395,replica,0} [ns_server:debug,2014-08-19T16:50:57.070,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 393. Nacking mccouch update. [views:debug,2014-08-19T16:50:57.070,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/393. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:57.070,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",393,replica,0} [ns_server:debug,2014-08-19T16:50:57.070,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,399,1021,944,761, 697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,943, 760,696,1007,994,747,683,981,949,766,734,702,670,423,1013,968,753,721,689, 657,410,1000,987,955,740,708,676,397,1019,974,942,759,727,695,663,416,1006, 993,961,746,714,682,403,980,948,765,733,701,669,422,1012,999,967,752,720,688, 656,409,986,954,739,707,675,1018,973,941,758,726,694,662,415,1005,992,960, 745,713,681,649,402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655, 408,985,953,738,706,674,395,1017,972,940,757,725,693,661,414,1004,991,959, 744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718,654,407,952, 705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406,951, 704,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,405,950, 767,703,1014,754,690,1001,988,741,677,975,728,664,417,962,715,651,404] [views:debug,2014-08-19T16:50:57.137,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/393. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:57.137,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",393,replica,0} [ns_server:debug,2014-08-19T16:50:57.232,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 391. Nacking mccouch update. [views:debug,2014-08-19T16:50:57.232,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/391. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:57.233,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",391,replica,0} [ns_server:debug,2014-08-19T16:50:57.233,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,399,1021,944,761, 697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,943, 760,696,1007,994,747,683,981,949,766,734,702,670,423,391,1013,968,753,721, 689,657,410,1000,987,955,740,708,676,397,1019,974,942,759,727,695,663,416, 1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012,999,967,752, 720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662,415,1005, 992,960,745,713,681,649,402,979,947,764,732,700,668,421,1011,998,966,751,719, 687,655,408,985,953,738,706,674,395,1017,972,940,757,725,693,661,414,1004, 991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718,654, 407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653, 406,951,704,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 405,950,767,703,1014,754,690,1001,988,741,677,975,728,664,417,962,715,651, 404] [views:debug,2014-08-19T16:50:57.283,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/391. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:57.283,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",391,replica,0} [ns_server:debug,2014-08-19T16:50:57.375,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 652. Nacking mccouch update. [views:debug,2014-08-19T16:50:57.375,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/652. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:57.375,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",652,pending,0} [ns_server:debug,2014-08-19T16:50:57.375,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,399,1021,944,761, 697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,943, 760,696,1007,994,747,683,981,949,766,734,702,670,423,391,1013,968,753,721, 689,657,410,1000,987,955,740,708,676,397,1019,974,942,759,727,695,663,416, 1006,993,961,746,714,682,403,980,948,765,733,701,669,422,1012,999,967,752, 720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662,415,1005, 992,960,745,713,681,649,402,979,947,764,732,700,668,421,1011,998,966,751,719, 687,655,408,985,953,738,706,674,395,1017,972,940,757,725,693,661,414,1004, 991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718,654, 407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653, 406,951,704,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 652,405,950,767,703,1014,754,690,1001,988,741,677,975,728,664,417,962,715, 651,404] [views:debug,2014-08-19T16:50:57.425,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/652. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:57.425,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",652,pending,0} [ns_server:debug,2014-08-19T16:50:57.509,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 650. Nacking mccouch update. [views:debug,2014-08-19T16:50:57.509,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/650. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:57.509,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",650,pending,0} [ns_server:debug,2014-08-19T16:50:57.510,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,399,1021,944,761, 697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,943, 760,696,1007,994,747,683,981,949,766,734,702,670,423,391,1013,968,753,721, 689,657,410,1000,987,955,740,708,676,397,1019,974,942,759,727,695,663,416, 1006,993,961,746,714,682,650,403,980,948,765,733,701,669,422,1012,999,967, 752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662,415, 1005,992,960,745,713,681,649,402,979,947,764,732,700,668,421,1011,998,966, 751,719,687,655,408,985,953,738,706,674,395,1017,972,940,757,725,693,661,414, 1004,991,959,744,712,680,401,1023,978,946,763,731,699,667,420,1010,965,718, 654,407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717, 653,406,951,704,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963, 716,652,405,950,767,703,1014,754,690,1001,988,741,677,975,728,664,417,962, 715,651,404] [views:debug,2014-08-19T16:50:57.559,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/650. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:57.560,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",650,pending,0} [ns_server:debug,2014-08-19T16:50:57.651,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 648. Nacking mccouch update. [views:debug,2014-08-19T16:50:57.651,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/648. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:57.652,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",648,pending,0} [ns_server:debug,2014-08-19T16:50:57.652,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,399,1021,944,761, 697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020,943, 760,696,1007,994,747,683,981,949,766,734,702,670,423,391,1013,968,753,721, 689,657,410,1000,987,955,740,708,676,397,1019,974,942,759,727,695,663,416, 1006,993,961,746,714,682,650,403,980,948,765,733,701,669,422,1012,999,967, 752,720,688,656,409,986,954,739,707,675,1018,973,941,758,726,694,662,415, 1005,992,960,745,713,681,649,402,979,947,764,732,700,668,421,1011,998,966, 751,719,687,655,408,985,953,738,706,674,395,1017,972,940,757,725,693,661,414, 1004,991,959,744,712,680,648,401,1023,978,946,763,731,699,667,420,1010,965, 718,654,407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964, 717,653,406,951,704,393,1015,938,755,691,1002,989,742,678,976,729,665,418, 963,716,652,405,950,767,703,1014,754,690,1001,988,741,677,975,728,664,417, 962,715,651,404] [views:debug,2014-08-19T16:50:57.702,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/648. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:57.702,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",648,pending,0} [ns_server:debug,2014-08-19T16:50:57.812,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 646. Nacking mccouch update. [views:debug,2014-08-19T16:50:57.812,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/646. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:57.812,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",646,pending,0} [ns_server:debug,2014-08-19T16:50:57.813,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020, 943,760,696,1007,994,747,683,981,734,670,423,968,753,721,689,657,410,1000, 987,955,740,708,676,397,1019,974,942,759,727,695,663,416,1006,993,961,746, 714,682,650,403,980,948,765,733,701,669,422,1012,999,967,752,720,688,656,409, 986,954,739,707,675,1018,973,941,758,726,694,662,415,1005,992,960,745,713, 681,649,402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655,408,985, 953,738,706,674,395,1017,972,940,757,725,693,661,414,1004,991,959,744,712, 680,648,401,1023,978,946,763,731,699,667,420,1010,965,718,654,407,952,705, 1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406,951,704, 393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,652,405,950, 767,703,1014,754,690,1001,988,741,677,975,728,664,417,962,715,651,404,949, 766,702,391,1013] [views:debug,2014-08-19T16:50:57.879,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/646. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:57.879,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",646,pending,0} [ns_server:debug,2014-08-19T16:50:58.004,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 644. Nacking mccouch update. [views:debug,2014-08-19T16:50:58.004,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/644. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:58.004,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",644,pending,0} [ns_server:debug,2014-08-19T16:50:58.005,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020, 943,760,696,1007,994,747,683,981,734,670,423,968,753,721,689,657,410,1000, 987,955,740,708,676,644,397,1019,974,942,759,727,695,663,416,1006,993,961, 746,714,682,650,403,980,948,765,733,701,669,422,1012,999,967,752,720,688,656, 409,986,954,739,707,675,1018,973,941,758,726,694,662,415,1005,992,960,745, 713,681,649,402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655,408, 985,953,738,706,674,395,1017,972,940,757,725,693,661,414,1004,991,959,744, 712,680,648,401,1023,978,946,763,731,699,667,420,1010,965,718,654,407,952, 705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406,951, 704,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,652,405, 950,767,703,1014,754,690,1001,988,741,677,975,728,664,417,962,715,651,404, 949,766,702,391,1013] [views:debug,2014-08-19T16:50:58.072,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/644. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:58.072,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",644,pending,0} [ns_server:debug,2014-08-19T16:50:58.230,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 642. Nacking mccouch update. [views:debug,2014-08-19T16:50:58.230,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/642. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:58.230,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",642,pending,0} [ns_server:debug,2014-08-19T16:50:58.231,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020, 943,760,696,1007,994,747,683,981,734,670,423,968,753,721,689,657,410,1000, 987,955,740,708,676,644,397,1019,974,942,759,727,695,663,416,1006,993,961, 746,714,682,650,403,980,948,765,733,701,669,422,1012,999,967,752,720,688,656, 409,986,954,739,707,675,1018,973,941,758,726,694,662,415,1005,992,960,745, 713,681,649,402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655,408, 985,953,738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959, 744,712,680,648,401,1023,978,946,763,731,699,667,420,1010,965,718,654,407, 952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,652, 405,950,767,703,1014,754,690,1001,988,741,677,975,728,664,417,962,715,651, 404,949,766,702,391,1013] [views:debug,2014-08-19T16:50:58.282,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/642. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:58.282,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",642,pending,0} [ns_server:debug,2014-08-19T16:50:58.407,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 640. Nacking mccouch update. [views:debug,2014-08-19T16:50:58.407,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/640. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:58.407,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",640,pending,0} [ns_server:debug,2014-08-19T16:50:58.408,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020, 943,760,696,1007,994,747,683,981,734,670,423,968,753,721,689,657,410,1000, 987,955,740,708,676,644,397,1019,974,942,759,727,695,663,416,1006,993,961, 746,714,682,650,403,980,948,765,733,701,669,422,1012,999,967,752,720,688,656, 409,986,954,739,707,675,1018,973,941,758,726,694,662,415,1005,992,960,745, 713,681,649,402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655,408, 985,953,738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959, 744,712,680,648,401,1023,978,946,763,731,699,667,420,1010,965,718,654,407, 952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 652,405,950,767,703,1014,754,690,1001,988,741,677,975,728,664,417,962,715, 651,404,949,766,702,391,1013] [views:debug,2014-08-19T16:50:58.492,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/640. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:58.492,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",640,pending,0} [ns_server:debug,2014-08-19T16:50:58.618,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 396. Nacking mccouch update. [views:debug,2014-08-19T16:50:58.619,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/396. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:58.619,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",396,replica,0} [ns_server:debug,2014-08-19T16:50:58.619,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020, 943,760,696,1007,994,747,683,981,734,670,423,968,753,721,689,657,410,1000, 987,955,740,708,676,644,397,1019,974,942,759,727,695,663,416,1006,993,961, 746,714,682,650,403,980,948,765,733,701,669,422,1012,999,967,752,720,688,656, 409,986,954,739,707,675,396,1018,973,941,758,726,694,662,415,1005,992,960, 745,713,681,649,402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655, 408,985,953,738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991, 959,744,712,680,648,401,1023,978,946,763,731,699,667,420,1010,965,718,654, 407,952,705,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653, 406,951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963, 716,652,405,950,767,703,1014,754,690,1001,988,741,677,975,728,664,417,962, 715,651,404,949,766,702,391,1013] [views:debug,2014-08-19T16:50:58.687,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/396. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:58.687,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",396,replica,0} [ns_server:debug,2014-08-19T16:50:58.778,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 394. Nacking mccouch update. [views:debug,2014-08-19T16:50:58.778,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/394. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:58.778,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",394,replica,0} [ns_server:debug,2014-08-19T16:50:58.778,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020, 943,760,696,1007,994,747,683,981,734,670,423,968,721,657,410,987,955,740,708, 676,644,397,1019,974,942,759,727,695,663,416,1006,993,961,746,714,682,650, 403,980,948,765,733,701,669,422,1012,999,967,752,720,688,656,409,986,954,739, 707,675,396,1018,973,941,758,726,694,662,415,1005,992,960,745,713,681,649, 402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655,408,985,953,738, 706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959,744,712,680, 648,401,1023,978,946,763,731,699,667,420,1010,965,718,654,407,952,705,394, 1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406,951,704, 640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,652,405, 950,767,703,1014,754,690,1001,988,741,677,975,728,664,417,962,715,651,404, 949,766,702,391,1013,753,689,1000] [views:debug,2014-08-19T16:50:58.829,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/394. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:58.829,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",394,replica,0} [ns_server:debug,2014-08-19T16:50:58.920,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 392. Nacking mccouch update. [views:debug,2014-08-19T16:50:58.920,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/392. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:58.921,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",392,replica,0} [ns_server:debug,2014-08-19T16:50:58.921,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020, 943,760,696,1007,994,747,683,981,734,670,423,968,721,657,410,987,955,740,708, 676,644,397,1019,974,942,759,727,695,663,416,1006,993,961,746,714,682,650, 403,980,948,765,733,701,669,422,1012,999,967,752,720,688,656,409,986,954,739, 707,675,396,1018,973,941,758,726,694,662,415,1005,992,960,745,713,681,649, 402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655,408,985,953,738, 706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959,744,712,680, 648,401,1023,978,946,763,731,699,667,420,1010,965,718,654,407,952,705,394, 1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406,951,704, 640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,652,405, 950,767,703,392,1014,754,690,1001,988,741,677,975,728,664,417,962,715,651, 404,949,766,702,391,1013,753,689,1000] [views:debug,2014-08-19T16:50:58.972,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/392. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:58.972,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",392,replica,0} [ns_server:debug,2014-08-19T16:50:59.063,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 390. Nacking mccouch update. [views:debug,2014-08-19T16:50:59.063,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/390. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:59.063,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",390,replica,0} [ns_server:debug,2014-08-19T16:50:59.064,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020, 943,760,696,1007,994,747,683,981,734,670,423,968,721,657,410,987,955,740,708, 676,644,397,1019,974,942,759,727,695,663,416,1006,993,961,746,714,682,650, 403,980,948,765,733,701,669,422,390,1012,999,967,752,720,688,656,409,986,954, 739,707,675,396,1018,973,941,758,726,694,662,415,1005,992,960,745,713,681, 649,402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655,408,985,953, 738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959,744,712, 680,648,401,1023,978,946,763,731,699,667,420,1010,965,718,654,407,952,705, 394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406,951, 704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716,652, 405,950,767,703,392,1014,754,690,1001,988,741,677,975,728,664,417,962,715, 651,404,949,766,702,391,1013,753,689,1000] [views:debug,2014-08-19T16:50:59.097,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/390. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:59.097,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",390,replica,0} [ns_server:debug,2014-08-19T16:50:59.188,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 388. Nacking mccouch update. [views:debug,2014-08-19T16:50:59.189,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/388. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:59.189,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",388,replica,0} [ns_server:debug,2014-08-19T16:50:59.189,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398,1020, 943,760,696,1007,994,747,683,981,734,670,423,968,721,657,410,987,955,740,708, 676,644,397,1019,974,942,759,727,695,663,416,1006,993,961,746,714,682,650, 403,980,948,765,733,701,669,422,390,1012,999,967,752,720,688,656,409,986,954, 739,707,675,396,1018,973,941,758,726,694,662,415,1005,992,960,745,713,681, 649,402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655,408,985,953, 738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959,744,712, 680,648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407,952, 705,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 652,405,950,767,703,392,1014,754,690,1001,988,741,677,975,728,664,417,962, 715,651,404,949,766,702,391,1013,753,689,1000] [views:debug,2014-08-19T16:50:59.234,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/388. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:59.234,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",388,replica,0} [ns_server:debug,2014-08-19T16:50:59.384,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 386. Nacking mccouch update. [views:debug,2014-08-19T16:50:59.384,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/386. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:59.384,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",386,replica,0} [ns_server:debug,2014-08-19T16:50:59.384,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398, 1020,943,760,696,1007,994,747,683,981,734,670,423,968,721,657,410,987,955, 740,708,676,644,397,1019,974,942,759,727,695,663,416,1006,993,961,746,714, 682,650,403,980,948,765,733,701,669,422,390,1012,999,967,752,720,688,656,409, 986,954,739,707,675,396,1018,973,941,758,726,694,662,415,1005,992,960,745, 713,681,649,402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655,408, 985,953,738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959, 744,712,680,648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654, 407,952,705,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717, 653,406,951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418, 963,716,652,405,950,767,703,392,1014,754,690,1001,988,741,677,975,728,664, 417,962,715,651,404,949,766,702,391,1013,753,689,1000] [views:debug,2014-08-19T16:50:59.467,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/386. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:59.468,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",386,replica,0} [ns_server:debug,2014-08-19T16:50:59.609,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 384. Nacking mccouch update. [views:debug,2014-08-19T16:50:59.610,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/384. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:59.610,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",384,replica,0} [ns_server:debug,2014-08-19T16:50:59.610,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,398, 1020,943,760,696,1007,994,747,683,981,734,670,423,968,721,657,410,955,708, 644,397,1019,974,942,759,727,695,663,416,384,1006,993,961,746,714,682,650, 403,980,948,765,733,701,669,422,390,1012,999,967,752,720,688,656,409,986,954, 739,707,675,396,1018,973,941,758,726,694,662,415,1005,992,960,745,713,681, 649,402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655,408,985,953, 738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959,744,712, 680,648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407,952, 705,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 652,405,950,767,703,392,1014,754,690,1001,988,741,677,975,728,664,417,962, 715,651,404,949,766,702,391,1013,753,689,1000,987,740,676] [views:debug,2014-08-19T16:50:59.685,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/384. Updated state: replica (0) [ns_server:debug,2014-08-19T16:50:59.685,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",384,replica,0} [rebalance:debug,2014-08-19T16:50:59.686,ns_1@10.242.238.90:<0.25443.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:59.686,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25443.0> (ok) [rebalance:debug,2014-08-19T16:50:59.686,ns_1@10.242.238.90:<0.25416.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:50:59.686,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25416.0> (ok) [ns_server:debug,2014-08-19T16:50:59.844,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 645. Nacking mccouch update. [views:debug,2014-08-19T16:50:59.844,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/645. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:59.844,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",645,pending,0} [ns_server:debug,2014-08-19T16:50:59.844,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,645,398, 1020,943,760,696,1007,994,747,683,981,734,670,423,968,721,657,410,955,708, 644,397,1019,974,942,759,727,695,663,416,384,1006,993,961,746,714,682,650, 403,980,948,765,733,701,669,422,390,1012,999,967,752,720,688,656,409,986,954, 739,707,675,396,1018,973,941,758,726,694,662,415,1005,992,960,745,713,681, 649,402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655,408,985,953, 738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959,744,712, 680,648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407,952, 705,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 652,405,950,767,703,392,1014,754,690,1001,988,741,677,975,728,664,417,962, 715,651,404,949,766,702,391,1013,753,689,1000,987,740,676] [views:debug,2014-08-19T16:50:59.912,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/645. Updated state: pending (0) [ns_server:debug,2014-08-19T16:50:59.912,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",645,pending,0} [ns_server:debug,2014-08-19T16:51:00.028,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 643. Nacking mccouch update. [views:debug,2014-08-19T16:51:00.028,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/643. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:00.028,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",643,pending,0} [ns_server:debug,2014-08-19T16:51:00.029,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,645,398, 1020,943,760,696,1007,994,747,683,981,734,670,423,968,721,657,410,955,708, 644,397,1019,974,942,759,727,695,663,416,384,1006,993,961,746,714,682,650, 403,980,948,765,733,701,669,422,390,1012,999,967,752,720,688,656,409,986,954, 739,707,675,643,396,1018,973,941,758,726,694,662,415,1005,992,960,745,713, 681,649,402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655,408,985, 953,738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959,744, 712,680,648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407, 952,705,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653, 406,951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963, 716,652,405,950,767,703,392,1014,754,690,1001,988,741,677,975,728,664,417, 962,715,651,404,949,766,702,391,1013,753,689,1000,987,740,676] [views:debug,2014-08-19T16:51:00.096,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/643. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:00.096,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",643,pending,0} [ns_server:debug,2014-08-19T16:51:00.172,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 641. Nacking mccouch update. [views:debug,2014-08-19T16:51:00.173,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/641. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:00.173,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",641,pending,0} [ns_server:debug,2014-08-19T16:51:00.173,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,645,398, 1020,943,760,696,1007,994,747,683,981,734,670,423,968,721,657,410,955,708, 644,397,1019,974,942,759,727,695,663,416,384,1006,993,961,746,714,682,650, 403,980,948,765,733,701,669,422,390,1012,999,967,752,720,688,656,409,986,954, 739,707,675,643,396,1018,973,941,758,726,694,662,415,1005,992,960,745,713, 681,649,402,979,947,764,732,700,668,421,1011,998,966,751,719,687,655,408,985, 953,738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959,744, 712,680,648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407, 952,705,641,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717, 653,406,951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418, 963,716,652,405,950,767,703,392,1014,754,690,1001,988,741,677,975,728,664, 417,962,715,651,404,949,766,702,391,1013,753,689,1000,987,740,676] [views:debug,2014-08-19T16:51:00.206,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/641. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:00.207,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",641,pending,0} [ns_server:debug,2014-08-19T16:51:00.342,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 389. Nacking mccouch update. [views:debug,2014-08-19T16:51:00.342,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/389. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:00.342,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",389,replica,0} [ns_server:debug,2014-08-19T16:51:00.342,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021,944, 761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,645,398, 1020,943,760,696,1007,994,747,683,981,734,670,423,968,721,657,410,955,708, 644,397,1019,974,942,759,727,695,663,416,384,1006,993,961,746,714,682,650, 403,980,948,765,733,701,669,422,390,1012,999,967,752,720,688,656,409,986,954, 739,707,675,643,396,1018,973,941,758,726,694,662,415,1005,992,960,745,713, 681,649,402,979,947,764,732,700,668,421,389,1011,998,966,751,719,687,655,408, 985,953,738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959, 744,712,680,648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654, 407,952,705,641,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964, 717,653,406,951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665, 418,963,716,652,405,950,767,703,392,1014,754,690,1001,988,741,677,975,728, 664,417,962,715,651,404,949,766,702,391,1013,753,689,1000,987,740,676] [views:debug,2014-08-19T16:51:00.376,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/389. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:00.376,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",389,replica,0} [ns_server:debug,2014-08-19T16:51:00.459,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 387. Nacking mccouch update. [views:debug,2014-08-19T16:51:00.459,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/387. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:00.459,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",387,replica,0} [ns_server:debug,2014-08-19T16:51:00.460,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021, 944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,645, 398,1020,943,760,696,1007,994,747,683,981,734,670,423,968,721,657,410,955, 708,644,397,1019,942,759,695,384,1006,993,961,746,714,682,650,403,980,948, 765,733,701,669,422,390,1012,999,967,752,720,688,656,409,986,954,739,707,675, 643,396,1018,973,941,758,726,694,662,415,1005,992,960,745,713,681,649,402, 979,947,764,732,700,668,421,389,1011,998,966,751,719,687,655,408,985,953,738, 706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959,744,712,680, 648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407,952,705, 641,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 652,405,950,767,703,392,1014,754,690,1001,988,741,677,975,728,664,417,962, 715,651,404,949,766,702,391,1013,753,689,1000,987,740,676,974,727,663,416] [views:debug,2014-08-19T16:51:00.493,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/387. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:00.493,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",387,replica,0} [ns_server:debug,2014-08-19T16:51:00.576,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 385. Nacking mccouch update. [views:debug,2014-08-19T16:51:00.577,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/385. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:00.577,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",385,replica,0} [ns_server:debug,2014-08-19T16:51:00.577,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021, 944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,645, 398,1020,943,760,696,385,1007,994,747,683,981,734,670,423,968,721,657,410, 955,708,644,397,1019,942,759,695,384,1006,993,961,746,714,682,650,403,980, 948,765,733,701,669,422,390,1012,999,967,752,720,688,656,409,986,954,739,707, 675,643,396,1018,973,941,758,726,694,662,415,1005,992,960,745,713,681,649, 402,979,947,764,732,700,668,421,389,1011,998,966,751,719,687,655,408,985,953, 738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959,744,712, 680,648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407,952, 705,641,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653, 406,951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963, 716,652,405,950,767,703,392,1014,754,690,1001,988,741,677,975,728,664,417, 962,715,651,404,949,766,702,391,1013,753,689,1000,987,740,676,974,727,663, 416] [views:debug,2014-08-19T16:51:00.611,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/385. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:00.611,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",385,replica,0} [rebalance:debug,2014-08-19T16:51:00.682,ns_1@10.242.238.90:<0.25513.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:00.682,ns_1@10.242.238.90:<0.25488.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:00.682,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25513.0> (ok) [ns_server:debug,2014-08-19T16:51:00.682,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25488.0> (ok) [rebalance:debug,2014-08-19T16:51:00.814,ns_1@10.242.238.90:<0.25436.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:00.814,ns_1@10.242.238.90:<0.25463.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:00.814,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25436.0> (ok) [ns_server:debug,2014-08-19T16:51:00.814,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25463.0> (ok) [rebalance:debug,2014-08-19T16:51:00.913,ns_1@10.242.238.90:<0.25397.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:00.913,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25397.0> (ok) [rebalance:debug,2014-08-19T16:51:00.964,ns_1@10.242.238.90:<0.25938.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:00.964,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25938.0> (ok) 
[rebalance:debug,2014-08-19T16:51:01.030,ns_1@10.242.238.90:<0.25916.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:01.030,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25916.0> (ok) [rebalance:debug,2014-08-19T16:51:01.097,ns_1@10.242.238.90:<0.25852.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:01.097,ns_1@10.242.238.90:<0.25927.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:01.097,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25852.0> (ok) [ns_server:debug,2014-08-19T16:51:01.098,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25927.0> (ok) [rebalance:debug,2014-08-19T16:51:01.214,ns_1@10.242.238.90:<0.25802.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:01.214,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25802.0> (ok) [rebalance:debug,2014-08-19T16:51:01.215,ns_1@10.242.238.90:<0.25891.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:01.215,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25891.0> (ok) [rebalance:debug,2014-08-19T16:51:01.348,ns_1@10.242.238.90:<0.25827.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:01.348,ns_1@10.242.238.90:<0.25752.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:01.348,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25827.0> (ok) [ns_server:debug,2014-08-19T16:51:01.348,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25752.0> (ok) [rebalance:debug,2014-08-19T16:51:01.467,ns_1@10.242.238.90:<0.25708.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:01.467,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25708.0> (ok) [rebalance:debug,2014-08-19T16:51:01.467,ns_1@10.242.238.90:<0.25777.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:01.467,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25777.0> (ok) [rebalance:debug,2014-08-19T16:51:01.561,ns_1@10.242.238.90:<0.25658.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:01.561,ns_1@10.242.238.90:<0.25733.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:01.561,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25658.0> (ok) [ns_server:debug,2014-08-19T16:51:01.561,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25733.0> (ok) [rebalance:debug,2014-08-19T16:51:01.565,ns_1@10.242.238.90:<0.26308.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 396 [rebalance:debug,2014-08-19T16:51:01.662,ns_1@10.242.238.90:<0.25683.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:01.662,ns_1@10.242.238.90:<0.25607.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:51:01.662,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25683.0> (ok) [ns_server:debug,2014-08-19T16:51:01.662,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25607.0> (ok) [rebalance:debug,2014-08-19T16:51:01.737,ns_1@10.242.238.90:<0.25557.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:01.737,ns_1@10.242.238.90:<0.25633.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:01.737,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25557.0> (ok) [ns_server:debug,2014-08-19T16:51:01.737,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25633.0> (ok) [rebalance:debug,2014-08-19T16:51:01.804,ns_1@10.242.238.90:<0.25933.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:01.804,ns_1@10.242.238.90:<0.25582.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:01.804,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25933.0> (ok) [ns_server:debug,2014-08-19T16:51:01.804,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25582.0> (ok) [rebalance:debug,2014-08-19T16:51:01.904,ns_1@10.242.238.90:<0.25897.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:01.904,ns_1@10.242.238.90:<0.25518.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:01.904,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25897.0> (ok) [ns_server:debug,2014-08-19T16:51:01.905,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25518.0> (ok) [rebalance:debug,2014-08-19T16:51:01.996,ns_1@10.242.238.90:<0.25847.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:01.996,ns_1@10.242.238.90:<0.25922.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:01.997,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25847.0> (ok) [ns_server:debug,2014-08-19T16:51:01.997,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25922.0> (ok) [rebalance:debug,2014-08-19T16:51:02.072,ns_1@10.242.238.90:<0.25783.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:02.072,ns_1@10.242.238.90:<0.25872.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:02.072,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25783.0> (ok) [ns_server:debug,2014-08-19T16:51:02.072,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25872.0> (ok) [rebalance:debug,2014-08-19T16:51:02.156,ns_1@10.242.238.90:<0.25819.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:02.156,ns_1@10.242.238.90:<0.25739.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:02.156,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25819.0> (ok) 
[ns_server:debug,2014-08-19T16:51:02.156,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25739.0> (ok) [rebalance:debug,2014-08-19T16:51:02.292,ns_1@10.242.238.90:<0.25703.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:02.292,ns_1@10.242.238.90:<0.25772.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:02.293,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25703.0> (ok) [ns_server:debug,2014-08-19T16:51:02.293,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25772.0> (ok) [rebalance:debug,2014-08-19T16:51:02.393,ns_1@10.242.238.90:<0.25714.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:02.393,ns_1@10.242.238.90:<0.25653.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:02.393,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25714.0> (ok) [ns_server:debug,2014-08-19T16:51:02.393,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25653.0> (ok) [rebalance:debug,2014-08-19T16:51:02.510,ns_1@10.242.238.90:<0.25678.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:02.510,ns_1@10.242.238.90:<0.25602.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:02.510,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25678.0> (ok) [ns_server:debug,2014-08-19T16:51:02.510,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25602.0> (ok) [rebalance:debug,2014-08-19T16:51:02.644,ns_1@10.242.238.90:<0.25538.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:02.644,ns_1@10.242.238.90:<0.25628.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:02.644,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25538.0> (ok) [ns_server:debug,2014-08-19T16:51:02.644,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25628.0> (ok) [rebalance:debug,2014-08-19T16:51:02.761,ns_1@10.242.238.90:<0.25577.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:02.761,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.25577.0> (ok) [rebalance:debug,2014-08-19T16:51:02.811,ns_1@10.242.238.90:<0.26308.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:02.811,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26308.0> (ok) [rebalance:debug,2014-08-19T16:51:04.175,ns_1@10.242.238.90:<0.26333.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 398 [rebalance:debug,2014-08-19T16:51:04.175,ns_1@10.242.238.90:<0.26334.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 399 [rebalance:debug,2014-08-19T16:51:04.176,ns_1@10.242.238.90:<0.26333.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:04.176,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done 
message from subprocess: <0.26333.0> (ok) [rebalance:debug,2014-08-19T16:51:04.177,ns_1@10.242.238.90:<0.26334.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:04.177,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26334.0> (ok) [rebalance:debug,2014-08-19T16:51:04.329,ns_1@10.242.238.90:<0.26339.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 400 [rebalance:debug,2014-08-19T16:51:04.329,ns_1@10.242.238.90:<0.26340.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 401 [rebalance:debug,2014-08-19T16:51:04.330,ns_1@10.242.238.90:<0.26339.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:04.330,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26339.0> (ok) [rebalance:debug,2014-08-19T16:51:04.330,ns_1@10.242.238.90:<0.26340.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:04.330,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26340.0> (ok) [rebalance:debug,2014-08-19T16:51:04.458,ns_1@10.242.238.90:<0.26345.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 402 [rebalance:debug,2014-08-19T16:51:04.458,ns_1@10.242.238.90:<0.26348.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 403 [rebalance:debug,2014-08-19T16:51:04.459,ns_1@10.242.238.90:<0.26345.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:04.459,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26345.0> (ok) [rebalance:debug,2014-08-19T16:51:04.459,ns_1@10.242.238.90:<0.26348.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:04.459,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26348.0> (ok) [rebalance:debug,2014-08-19T16:51:04.583,ns_1@10.242.238.90:<0.26351.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 405 [rebalance:debug,2014-08-19T16:51:04.584,ns_1@10.242.238.90:<0.26354.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 404 [rebalance:debug,2014-08-19T16:51:04.584,ns_1@10.242.238.90:<0.26354.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:04.585,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26354.0> (ok) [rebalance:debug,2014-08-19T16:51:04.585,ns_1@10.242.238.90:<0.26351.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:04.585,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26351.0> (ok) [rebalance:debug,2014-08-19T16:51:04.684,ns_1@10.242.238.90:<0.26357.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 653 [rebalance:debug,2014-08-19T16:51:04.684,ns_1@10.242.238.90:<0.26360.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 652 [rebalance:debug,2014-08-19T16:51:04.685,ns_1@10.242.238.90:<0.26360.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:04.685,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done 
message from subprocess: <0.26360.0> (ok) [rebalance:debug,2014-08-19T16:51:04.685,ns_1@10.242.238.90:<0.26357.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:04.685,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26357.0> (ok) [rebalance:debug,2014-08-19T16:51:04.842,ns_1@10.242.238.90:<0.26363.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 654 [rebalance:debug,2014-08-19T16:51:04.842,ns_1@10.242.238.90:<0.26366.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 655 [rebalance:debug,2014-08-19T16:51:04.844,ns_1@10.242.238.90:<0.26366.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:04.844,ns_1@10.242.238.90:<0.26363.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:04.844,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26366.0> (ok) [ns_server:debug,2014-08-19T16:51:04.844,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26363.0> (ok) [rebalance:debug,2014-08-19T16:51:04.960,ns_1@10.242.238.90:<0.26369.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 657 [rebalance:debug,2014-08-19T16:51:04.960,ns_1@10.242.238.90:<0.26372.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 656 [rebalance:debug,2014-08-19T16:51:04.961,ns_1@10.242.238.90:<0.26372.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:04.961,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26372.0> (ok) [rebalance:debug,2014-08-19T16:51:04.961,ns_1@10.242.238.90:<0.26369.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:04.961,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26369.0> (ok) [rebalance:debug,2014-08-19T16:51:05.097,ns_1@10.242.238.90:<0.26375.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 659 [rebalance:debug,2014-08-19T16:51:05.097,ns_1@10.242.238.90:<0.26378.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 658 [rebalance:debug,2014-08-19T16:51:05.098,ns_1@10.242.238.90:<0.26378.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:05.098,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26378.0> (ok) [rebalance:debug,2014-08-19T16:51:05.098,ns_1@10.242.238.90:<0.26375.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:05.098,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26375.0> (ok) [rebalance:debug,2014-08-19T16:51:05.189,ns_1@10.242.238.90:<0.26381.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 660 [rebalance:debug,2014-08-19T16:51:05.190,ns_1@10.242.238.90:<0.26384.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 661 [rebalance:debug,2014-08-19T16:51:05.191,ns_1@10.242.238.90:<0.26381.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:05.191,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done 
message from subprocess: <0.26381.0> (ok) [rebalance:debug,2014-08-19T16:51:05.191,ns_1@10.242.238.90:<0.26384.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:05.191,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26384.0> (ok) [rebalance:debug,2014-08-19T16:51:05.785,ns_1@10.242.238.90:<0.26393.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 384 [rebalance:debug,2014-08-19T16:51:05.786,ns_1@10.242.238.90:<0.26396.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 385 [rebalance:debug,2014-08-19T16:51:05.786,ns_1@10.242.238.90:<0.26393.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:05.786,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26393.0> (ok) [rebalance:debug,2014-08-19T16:51:05.787,ns_1@10.242.238.90:<0.26396.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:05.787,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26396.0> (ok) [rebalance:debug,2014-08-19T16:51:05.910,ns_1@10.242.238.90:<0.26399.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 386 [rebalance:debug,2014-08-19T16:51:05.910,ns_1@10.242.238.90:<0.26402.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 387 [rebalance:debug,2014-08-19T16:51:05.911,ns_1@10.242.238.90:<0.26399.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:05.911,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26399.0> (ok) [rebalance:debug,2014-08-19T16:51:05.912,ns_1@10.242.238.90:<0.26402.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:05.912,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26402.0> (ok) [rebalance:debug,2014-08-19T16:51:06.079,ns_1@10.242.238.90:<0.26405.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 388 [rebalance:debug,2014-08-19T16:51:06.079,ns_1@10.242.238.90:<0.26408.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 389 [rebalance:debug,2014-08-19T16:51:06.080,ns_1@10.242.238.90:<0.26405.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:06.080,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26405.0> (ok) [rebalance:debug,2014-08-19T16:51:06.081,ns_1@10.242.238.90:<0.26408.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:06.081,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26408.0> (ok) [rebalance:debug,2014-08-19T16:51:06.244,ns_1@10.242.238.90:<0.26411.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 391 [rebalance:debug,2014-08-19T16:51:06.245,ns_1@10.242.238.90:<0.26414.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 390 [rebalance:debug,2014-08-19T16:51:06.246,ns_1@10.242.238.90:<0.26411.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:06.246,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done 
message from subprocess: <0.26411.0> (ok) [rebalance:debug,2014-08-19T16:51:06.246,ns_1@10.242.238.90:<0.26414.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:06.246,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26414.0> (ok) [rebalance:debug,2014-08-19T16:51:06.412,ns_1@10.242.238.90:<0.26417.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 392 [rebalance:debug,2014-08-19T16:51:06.412,ns_1@10.242.238.90:<0.26420.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 393 [rebalance:debug,2014-08-19T16:51:06.413,ns_1@10.242.238.90:<0.26417.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:06.413,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26417.0> (ok) [rebalance:debug,2014-08-19T16:51:06.413,ns_1@10.242.238.90:<0.26420.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:06.413,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26420.0> (ok) [rebalance:debug,2014-08-19T16:51:06.579,ns_1@10.242.238.90:<0.26424.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 395 [rebalance:debug,2014-08-19T16:51:06.579,ns_1@10.242.238.90:<0.26427.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 394 [rebalance:debug,2014-08-19T16:51:06.580,ns_1@10.242.238.90:<0.26427.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:06.580,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26427.0> (ok) [rebalance:debug,2014-08-19T16:51:06.581,ns_1@10.242.238.90:<0.26424.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:06.581,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26424.0> (ok) [rebalance:debug,2014-08-19T16:51:06.737,ns_1@10.242.238.90:<0.26430.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 397 [rebalance:debug,2014-08-19T16:51:06.738,ns_1@10.242.238.90:<0.26430.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:06.738,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26430.0> (ok) [rebalance:debug,2014-08-19T16:51:06.781,ns_1@10.242.238.90:<0.25656.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:06.781,ns_1@10.242.238.90:<0.25656.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:06.781,ns_1@10.242.238.90:<0.26433.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:06.781,ns_1@10.242.238.90:<0.26433.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:06.782,ns_1@10.242.238.90:<0.25656.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:51:06.786,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 396 state to replica [ns_server:info,2014-08-19T16:51:06.787,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [396,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423, 424,425,426] ([396], []) [ns_server:debug,2014-08-19T16:51:06.787,ns_1@10.242.238.90:<0.26434.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [396,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.34644>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[396,406,407,408,409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:06.788,ns_1@10.242.238.90:<0.26434.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.25382.0> [ns_server:info,2014-08-19T16:51:06.788,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:06.800,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{396,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:06.801,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:06.801,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:06.802,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:06.802,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:06.802,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:06.802,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:06.802,ns_1@10.242.238.90:<0.26436.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:06.802,ns_1@10.242.238.90:<0.26436.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:06.802,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:06.802,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:06.803,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:06.803,ns_1@10.242.238.90:<0.25382.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:06.803,ns_1@10.242.238.90:<0.26434.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.25382.0> [ns_server:debug,2014-08-19T16:51:06.803,ns_1@10.242.238.90:<0.26434.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:06.803,ns_1@10.242.238.90:<0.26438.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:06.803,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.25382.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.25383.0>,<<"cut off">>,<<"cut off">>,[],67,false,false,0, {1408,452666,801840}, completed, {<0.26434.0>,#Ref<0.0.1.34657>}, <<"replication_ns_1@10.242.238.90">>,<0.25382.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:06.804,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26434.0>,{#Ref<0.0.1.34646>,<0.26438.0>}} [error_logger:info,2014-08-19T16:51:06.804,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26438.0>}, {name, {new_child_id, [396,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [396,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424, 425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:06.812,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:06.814,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[396,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:06.814,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26439.0> [ns_server:debug,2014-08-19T16:51:06.820,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.820,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7418 us [ns_server:debug,2014-08-19T16:51:06.820,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:06.821,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{396, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:06.853,ns_1@10.242.238.90:<0.25580.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:06.853,ns_1@10.242.238.90:<0.25580.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:06.853,ns_1@10.242.238.90:<0.26441.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:06.853,ns_1@10.242.238.90:<0.26441.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:06.853,ns_1@10.242.238.90:<0.25580.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:06.857,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 399 state to replica [ns_server:info,2014-08-19T16:51:06.857,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [396,399,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426] ([399], []) [ns_server:debug,2014-08-19T16:51:06.858,ns_1@10.242.238.90:<0.26442.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [396,399,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425, 426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.34795>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[396,399,406,407,408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:06.858,ns_1@10.242.238.90:<0.26442.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26438.0> [ns_server:info,2014-08-19T16:51:06.858,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:51:06.866,ns_1@10.242.238.90:<0.25516.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:06.866,ns_1@10.242.238.90:<0.25516.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:06.867,ns_1@10.242.238.90:<0.26444.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:06.867,ns_1@10.242.238.90:<0.26444.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent 
[rebalance:info,2014-08-19T16:51:06.867,ns_1@10.242.238.90:<0.25516.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:06.868,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{396,1}, {399,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:06.869,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:06.869,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:06.869,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:06.869,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:06.869,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:06.869,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:06.870,ns_1@10.242.238.90:<0.26445.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:06.870,ns_1@10.242.238.90:<0.26445.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:06.870,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:06.870,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:06.870,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:06.870,ns_1@10.242.238.90:<0.26438.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:06.870,ns_1@10.242.238.90:<0.26442.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26438.0> [ns_server:debug,2014-08-19T16:51:06.871,ns_1@10.242.238.90:<0.26442.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:06.871,ns_1@10.242.238.90:<0.26447.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:06.871,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26438.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26439.0>,<<"cut off">>,<<"cut off">>,[],70,false,false,0, {1408,452666,869411}, completed, {<0.26442.0>,#Ref<0.0.1.34808>}, <<"replication_ns_1@10.242.238.90">>,<0.26438.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:06.871,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26442.0>,{#Ref<0.0.1.34797>,<0.26447.0>}} [error_logger:info,2014-08-19T16:51:06.871,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26447.0>}, {name, {new_child_id, [396,399,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425, 426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [396,399,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:06.878,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:06.881,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[396,399,406,407,408,409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:debug,2014-08-19T16:51:06.881,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:06.881,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26449.0> [ns_server:debug,2014-08-19T16:51:06.882,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3908 us [ns_server:debug,2014-08-19T16:51:06.882,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.883,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{399, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:06.884,ns_1@10.242.238.90:<0.25555.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:06.884,ns_1@10.242.238.90:<0.25555.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:06.884,ns_1@10.242.238.90:<0.26450.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:06.884,ns_1@10.242.238.90:<0.26450.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:06.885,ns_1@10.242.238.90:<0.25555.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:51:06.885,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 401 state to replica [ns_server:info,2014-08-19T16:51:06.886,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [396,399,401,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426] ([401], []) [ns_server:debug,2014-08-19T16:51:06.887,ns_1@10.242.238.90:<0.26451.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [396,399,401,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424, 425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.34966>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[396,399,401,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:06.887,ns_1@10.242.238.90:<0.26451.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26447.0> [ns_server:info,2014-08-19T16:51:06.887,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:51:06.891,ns_1@10.242.238.90:<0.25605.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:06.892,ns_1@10.242.238.90:<0.25605.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:06.892,ns_1@10.242.238.90:<0.26453.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:06.892,ns_1@10.242.238.90:<0.26453.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:06.892,ns_1@10.242.238.90:<0.25605.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:06.896,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{396,1}, {399,1}, {401,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:06.896,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:06.897,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:06.897,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:51:06.897,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:06.897,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:06.897,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:06.897,ns_1@10.242.238.90:<0.26454.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:06.897,ns_1@10.242.238.90:<0.26454.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:06.898,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:06.898,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:06.898,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:06.898,ns_1@10.242.238.90:<0.26447.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:06.898,ns_1@10.242.238.90:<0.26451.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26447.0> [ns_server:debug,2014-08-19T16:51:06.898,ns_1@10.242.238.90:<0.26451.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:06.898,ns_1@10.242.238.90:<0.26456.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:06.898,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26447.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26449.0>,<<"cut off">>,<<"cut off">>,[],73,false,false,0, {1408,452666,897221}, completed, {<0.26451.0>,#Ref<0.0.1.34979>}, <<"replication_ns_1@10.242.238.90">>,<0.26447.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:06.899,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26451.0>,{#Ref<0.0.1.34968>,<0.26456.0>}} [error_logger:info,2014-08-19T16:51:06.899,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26456.0>}, {name, {new_child_id, [396,399,401,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424, 425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [396,399,401,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:06.904,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:06.907,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2604 us [ns_server:debug,2014-08-19T16:51:06.907,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.908,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.908,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{401, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:06.909,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[396,399,401,406,407,408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:06.909,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26457.0> [ns_server:info,2014-08-19T16:51:06.910,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 400 state to replica [ns_server:info,2014-08-19T16:51:06.910,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [396,399,400,401,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426] ([400], []) [ns_server:debug,2014-08-19T16:51:06.912,ns_1@10.242.238.90:<0.26458.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [396,399,400,401,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423, 424,425,426], 
'ns_1@10.242.238.89'}, #Ref<0.0.1.35116>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[396,399,400,401,406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:06.912,ns_1@10.242.238.90:<0.26458.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26456.0> [ns_server:info,2014-08-19T16:51:06.912,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:06.921,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{396,1}, {399,1}, {400,1}, {401,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:06.922,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:06.922,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:06.922,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:06.922,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:06.922,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:06.923,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:06.923,ns_1@10.242.238.90:<0.26461.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:06.923,ns_1@10.242.238.90:<0.26461.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:06.923,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:06.923,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:06.923,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:06.923,ns_1@10.242.238.90:<0.26456.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:51:06.923,ns_1@10.242.238.90:<0.26458.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26456.0> [ns_server:debug,2014-08-19T16:51:06.924,ns_1@10.242.238.90:<0.26458.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:06.924,ns_1@10.242.238.90:<0.26463.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:06.924,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26456.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26457.0>,<<"cut off">>,<<"cut off">>,[],76,false,false,0, {1408,452666,922529}, completed, {<0.26458.0>,#Ref<0.0.1.35129>}, <<"replication_ns_1@10.242.238.90">>,<0.26456.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:06.924,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26458.0>,{#Ref<0.0.1.35118>,<0.26463.0>}} [error_logger:info,2014-08-19T16:51:06.924,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26463.0>}, {name, {new_child_id, [396,399,400,401,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423, 424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [396,399,400,401,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:06.930,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:06.933,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3303 us [ns_server:debug,2014-08-19T16:51:06.933,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.935,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.936,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{400, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:06.937,ns_1@10.242.238.90:<0.25491.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
[ns_server:debug,2014-08-19T16:51:06.937,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[396,399,400,401,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:debug,2014-08-19T16:51:06.937,ns_1@10.242.238.90:<0.25491.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:info,2014-08-19T16:51:06.937,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 398 state to replica [rebalance:debug,2014-08-19T16:51:06.937,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26465.0> [ns_server:debug,2014-08-19T16:51:06.937,ns_1@10.242.238.90:<0.26464.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:06.938,ns_1@10.242.238.90:<0.26464.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:info,2014-08-19T16:51:06.938,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [396,398,399,400,401,406,407,408,409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426] ([398], []) [rebalance:info,2014-08-19T16:51:06.938,ns_1@10.242.238.90:<0.25491.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:06.939,ns_1@10.242.238.90:<0.26466.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [396,398,399,400,401,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.35275>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[396,398,399,400,401,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:06.939,ns_1@10.242.238.90:<0.26466.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26463.0> [ns_server:info,2014-08-19T16:51:06.940,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:51:06.949,ns_1@10.242.238.90:<0.25466.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:06.949,ns_1@10.242.238.90:<0.25466.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:06.949,ns_1@10.242.238.90:<0.26468.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:info,2014-08-19T16:51:06.949,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] 
[ns_server:debug,2014-08-19T16:51:06.949,ns_1@10.242.238.90:<0.26468.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:06.949,ns_1@10.242.238.90:<0.25466.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:06.950,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:06.950,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:06.950,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:06.950,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:06.950,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:06.950,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:06.950,ns_1@10.242.238.90:<0.26469.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:06.950,ns_1@10.242.238.90:<0.26469.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:06.951,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:06.951,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:06.951,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:06.951,ns_1@10.242.238.90:<0.26463.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:06.951,ns_1@10.242.238.90:<0.26466.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26463.0> [ns_server:debug,2014-08-19T16:51:06.951,ns_1@10.242.238.90:<0.26466.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:06.951,ns_1@10.242.238.90:<0.26471.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:06.951,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26463.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26465.0>,<<"cut off">>,<<"cut off">>,[],79,false,false,0, {1408,452666,950407}, completed, {<0.26466.0>,#Ref<0.0.1.35288>}, <<"replication_ns_1@10.242.238.90">>,<0.26463.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[error_logger:info,2014-08-19T16:51:06.952,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26471.0>}, {name, {new_child_id, [396,398,399,400,401,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [396,398,399,400,401,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:06.952,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26466.0>,{#Ref<0.0.1.35277>,<0.26471.0>}} [ns_server:debug,2014-08-19T16:51:06.958,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:06.959,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1211 us [ns_server:debug,2014-08-19T16:51:06.959,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.960,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.960,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{398, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:06.963,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[396,398,399,400,401,406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:06.963,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26473.0> [ns_server:info,2014-08-19T16:51:06.965,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 402 state to replica [ns_server:info,2014-08-19T16:51:06.965,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [396,398,399,400,401,402,406,407,408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426] ([402], []) [ns_server:debug,2014-08-19T16:51:06.968,ns_1@10.242.238.90:<0.26474.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [396,398,399,400,401,402,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421, 
422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.35428>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[396,398,399,400,401,402,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:06.968,ns_1@10.242.238.90:<0.26474.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26471.0> [ns_server:info,2014-08-19T16:51:06.968,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:06.977,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:06.978,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:06.979,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:06.979,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:06.979,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:06.979,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:06.979,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:06.979,ns_1@10.242.238.90:<0.26476.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:06.979,ns_1@10.242.238.90:<0.26476.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:06.979,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:06.979,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:06.979,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:06.979,ns_1@10.242.238.90:<0.26471.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:51:06.980,ns_1@10.242.238.90:<0.26474.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26471.0> [ns_server:debug,2014-08-19T16:51:06.980,ns_1@10.242.238.90:<0.26474.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:06.980,ns_1@10.242.238.90:<0.26478.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:06.980,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26471.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26473.0>,<<"cut off">>,<<"cut off">>,[],82,false,false,0, {1408,452666,979000}, completed, {<0.26474.0>,#Ref<0.0.1.35441>}, <<"replication_ns_1@10.242.238.90">>,<0.26471.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:06.980,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26474.0>,{#Ref<0.0.1.35430>,<0.26478.0>}} [error_logger:info,2014-08-19T16:51:06.980,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26478.0>}, {name, {new_child_id, [396,398,399,400,401,402,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [396,398,399,400,401,402,406,407,408,409, 410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:06.988,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:06.990,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[396,398,399,400,401,402,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:06.990,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26480.0> [ns_server:debug,2014-08-19T16:51:06.991,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.991,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2247 us [ns_server:debug,2014-08-19T16:51:06.991,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.992,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{402, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, 
{replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:06.994,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 403 state to replica [ns_server:info,2014-08-19T16:51:06.994,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [396,398,399,400,401,402,403,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426] ([403], []) [ns_server:debug,2014-08-19T16:51:06.995,ns_1@10.242.238.90:<0.26481.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [396,398,399,400,401,402,403,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.35570>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[396,398,399,400,401,402,403,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:06.995,ns_1@10.242.238.90:<0.26481.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26478.0> [ns_server:info,2014-08-19T16:51:06.995,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:07.004,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:07.004,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:07.005,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:07.005,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:51:07.005,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:07.005,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:07.005,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.005,ns_1@10.242.238.90:<0.26483.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:07.005,ns_1@10.242.238.90:<0.26483.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:07.006,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:07.006,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:07.006,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:07.006,ns_1@10.242.238.90:<0.26478.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:07.006,ns_1@10.242.238.90:<0.26481.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26478.0> [ns_server:debug,2014-08-19T16:51:07.006,ns_1@10.242.238.90:<0.26481.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:07.007,ns_1@10.242.238.90:<0.26485.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:07.007,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26478.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26480.0>,<<"cut off">>,<<"cut off">>,[],85,false,false,0, {1408,452667,5229}, completed, {<0.26481.0>,#Ref<0.0.1.35583>}, <<"replication_ns_1@10.242.238.90">>,<0.26478.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:07.007,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26481.0>,{#Ref<0.0.1.35572>,<0.26485.0>}} [error_logger:info,2014-08-19T16:51:07.007,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26485.0>}, {name, {new_child_id, [396,398,399,400,401,402,403,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [396,398,399,400,401,402,403,406,407,408, 409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:07.011,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:07.017,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[396,398,399,400,401,402,403,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:07.017,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26486.0> [ns_server:debug,2014-08-19T16:51:07.018,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6348 us [ns_server:debug,2014-08-19T16:51:07.018,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.019,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.019,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{403, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:07.220,ns_1@10.242.238.90:<0.26488.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 640 [rebalance:debug,2014-08-19T16:51:07.221,ns_1@10.242.238.90:<0.26489.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 641 [rebalance:debug,2014-08-19T16:51:07.221,ns_1@10.242.238.90:<0.26488.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:07.222,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26488.0> (ok) [rebalance:debug,2014-08-19T16:51:07.222,ns_1@10.242.238.90:<0.26489.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:51:07.222,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26489.0> (ok) [rebalance:debug,2014-08-19T16:51:07.259,ns_1@10.242.238.90:<0.25400.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.260,ns_1@10.242.238.90:<0.25400.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.260,ns_1@10.242.238.90:<0.26494.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:07.260,ns_1@10.242.238.90:<0.26494.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:07.260,ns_1@10.242.238.90:<0.25400.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:07.264,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 405 state to replica [ns_server:info,2014-08-19T16:51:07.264,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [396,398,399,400,401,402,403,405,406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426] ([405], []) [ns_server:debug,2014-08-19T16:51:07.265,ns_1@10.242.238.90:<0.26495.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [396,398,399,400,401,402,403,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.35778>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[396,398,399,400,401,402,403,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:07.266,ns_1@10.242.238.90:<0.26495.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26485.0> [ns_server:info,2014-08-19T16:51:07.266,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:51:07.273,ns_1@10.242.238.90:<0.25439.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.273,ns_1@10.242.238.90:<0.25439.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.273,ns_1@10.242.238.90:<0.26497.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:07.273,ns_1@10.242.238.90:<0.26497.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:07.273,ns_1@10.242.238.90:<0.25439.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:51:07.275,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:07.276,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:07.276,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:07.276,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:07.277,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:07.277,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:07.277,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.277,ns_1@10.242.238.90:<0.26498.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:07.277,ns_1@10.242.238.90:<0.26498.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:07.277,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:07.277,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:07.278,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:07.278,ns_1@10.242.238.90:<0.26485.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:07.278,ns_1@10.242.238.90:<0.26495.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26485.0> [ns_server:debug,2014-08-19T16:51:07.278,ns_1@10.242.238.90:<0.26495.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:07.278,ns_1@10.242.238.90:<0.26500.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:07.278,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26485.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26486.0>,<<"cut off">>,<<"cut off">>,[],88,false,false,0, {1408,452667,276682}, completed, {<0.26495.0>,#Ref<0.0.1.35791>}, <<"replication_ns_1@10.242.238.90">>,<0.26485.0>, {had_backfill,false,undefined,[]}, completed,false}. 
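The entries above trace one complete new-style vbucket filter change on stream `replication_ns_1@10.242.238.90`: a helper registers under the new child id (now including vbucket 405), links itself to the old ebucketmigrator, the filter on the tap stream is changed, the upstream sender is silenced, downstream reception is confirmed with an opaque message, and the old process hands its state to the replacement before dying. When many of these repeat back to back, the "([added], [removed])" suffix on the tap_replication_manager entries is the quickest way to see what each change actually did; a minimal standalone sketch (Python, helper names are mine, not part of ns_server) that extracts those from a log like this:

import re
import sys

# Matches the tap_replication_manager "change replication" entries as they appear in this
# log; the trailing "([added], [removed])" lists are the vbuckets being added to / dropped
# from the replication stream for that source node.
CHANGE_RE = re.compile(
    r"Going to change replication from '([^']+)' to have\s*"
    r"\[([\d,\s]*)\]\s*\(\[([\d,\s]*)\],\s*\[([\d,\s]*)\]\)"
)

def parse_vblist(text):
    # "396,398, 399" (possibly wrapped across lines) -> [396, 398, 399]
    return [int(v) for v in text.split(",") if v.strip()]

def replication_changes(log_text):
    # Yield (source_node, full_filter, added, removed) for each filter change entry.
    for m in CHANGE_RE.finditer(log_text):
        node, full, added, removed = m.groups()
        yield node, parse_vblist(full), parse_vblist(added), parse_vblist(removed)

if __name__ == "__main__":
    for node, full, added, removed in replication_changes(sys.stdin.read()):
        print("%s: +%s -%s (filter now %d vbuckets)" % (node, added, removed, len(full)))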
[ns_server:debug,2014-08-19T16:51:07.279,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26495.0>,{#Ref<0.0.1.35780>,<0.26500.0>}} [error_logger:info,2014-08-19T16:51:07.279,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26500.0>}, {name, {new_child_id, [396,398,399,400,401,402,403,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [396,398,399,400,401,402,403,405,406,407, 408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:07.284,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:51:07.286,ns_1@10.242.238.90:<0.26501.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 642 [rebalance:debug,2014-08-19T16:51:07.287,ns_1@10.242.238.90:<0.26504.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 643 [ns_server:debug,2014-08-19T16:51:07.287,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.288,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3564 us [ns_server:debug,2014-08-19T16:51:07.288,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:07.288,ns_1@10.242.238.90:<0.26501.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:07.288,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26501.0> (ok) [rebalance:debug,2014-08-19T16:51:07.288,ns_1@10.242.238.90:<0.26504.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:07.288,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26504.0> (ok) [ns_server:debug,2014-08-19T16:51:07.289,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{405, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:07.290,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 404 state to replica 
[ns_server:info,2014-08-19T16:51:07.290,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [396,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426] ([404], []) [ns_server:debug,2014-08-19T16:51:07.290,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[396,398,399,400,401,402,403,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:07.290,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26508.0> [ns_server:debug,2014-08-19T16:51:07.291,ns_1@10.242.238.90:<0.26509.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [396,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.35958>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[396,398,399,400,401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:07.291,ns_1@10.242.238.90:<0.26509.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26500.0> [ns_server:info,2014-08-19T16:51:07.291,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:07.301,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:07.301,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:07.302,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:07.302,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:51:07.302,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:07.302,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:07.302,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.302,ns_1@10.242.238.90:<0.26511.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:07.302,ns_1@10.242.238.90:<0.26511.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:07.302,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:07.303,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:07.303,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:07.303,ns_1@10.242.238.90:<0.26500.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:07.303,ns_1@10.242.238.90:<0.26509.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26500.0> [ns_server:debug,2014-08-19T16:51:07.303,ns_1@10.242.238.90:<0.26509.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:07.303,ns_1@10.242.238.90:<0.26513.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:07.304,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26500.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26508.0>,<<"cut off">>,<<"cut off">>,[],91,false,false,0, {1408,452667,302090}, completed, {<0.26509.0>,#Ref<0.0.1.35971>}, <<"replication_ns_1@10.242.238.90">>,<0.26500.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:07.304,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26509.0>,{#Ref<0.0.1.35960>,<0.26513.0>}} [error_logger:info,2014-08-19T16:51:07.304,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26513.0>}, {name, {new_child_id, [396,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [396,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:07.308,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:07.311,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1347 us [ns_server:debug,2014-08-19T16:51:07.311,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.312,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.312,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{404, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:07.314,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[396,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:07.314,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26514.0> [rebalance:debug,2014-08-19T16:51:07.422,ns_1@10.242.238.90:<0.26522.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 645 [rebalance:debug,2014-08-19T16:51:07.422,ns_1@10.242.238.90:<0.26523.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 644 [rebalance:debug,2014-08-19T16:51:07.423,ns_1@10.242.238.90:<0.26523.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:07.423,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26523.0> (ok) 
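Interleaved with the filter changes, janitor_agent workers wait for checkpoint 1 to be persisted in the replica vbuckets being built here (640 through 645 in this stretch) and report back to janitor_agent-default as each wait finishes. A small standalone sketch (Python; function and pattern names are mine) that pairs each "Going to wait for persistence" entry with its "Done" by worker pid and reports how long persistence took:

import re
import sys
from datetime import datetime

# One janitor_agent entry header plus its message, as formatted in this log, e.g.
# [rebalance:debug,2014-08-19T16:51:07.220,ns_1@10.242.238.90:<0.26488.0>:janitor_agent:handle_call:793]Going to wait ...
WAIT_RE = re.compile(
    r"\[rebalance:debug,(\S+?),\S+?:(<[\d.]+>):janitor_agent:handle_call:793\]"
    r"Going to wait for persistence of checkpoint (\d+) in vbucket (\d+)"
)
DONE_RE = re.compile(
    r"\[rebalance:debug,(\S+?),\S+?:(<[\d.]+>):janitor_agent:handle_call:795\]Done"
)

def ts(s):
    return datetime.strptime(s, "%Y-%m-%dT%H:%M:%S.%f")

def persistence_waits(log_text):
    # Pair each wait with its completion by worker pid; return (vbucket, checkpoint, millis).
    pending = {}
    for m in WAIT_RE.finditer(log_text):
        when, pid, checkpoint, vbucket = m.groups()
        pending[pid] = (ts(when), int(checkpoint), int(vbucket))
    results = []
    for m in DONE_RE.finditer(log_text):
        when, pid = m.groups()
        if pid in pending:
            started, checkpoint, vbucket = pending.pop(pid)
            results.append((vbucket, checkpoint, (ts(when) - started).total_seconds() * 1000.0))
    return results

if __name__ == "__main__":
    for vbucket, checkpoint, ms in persistence_waits(sys.stdin.read()):
        print("vbucket %d: checkpoint %d persisted after %.0f ms" % (vbucket, checkpoint, ms))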
[rebalance:debug,2014-08-19T16:51:07.424,ns_1@10.242.238.90:<0.26522.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:07.424,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26522.0> (ok) [rebalance:debug,2014-08-19T16:51:07.505,ns_1@10.242.238.90:<0.26528.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 647 [rebalance:debug,2014-08-19T16:51:07.505,ns_1@10.242.238.90:<0.26531.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 646 [rebalance:debug,2014-08-19T16:51:07.506,ns_1@10.242.238.90:<0.26531.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:07.506,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26531.0> (ok) [rebalance:debug,2014-08-19T16:51:07.507,ns_1@10.242.238.90:<0.26528.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:07.507,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26528.0> (ok) [rebalance:debug,2014-08-19T16:51:07.588,ns_1@10.242.238.90:<0.26534.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 648 [rebalance:debug,2014-08-19T16:51:07.588,ns_1@10.242.238.90:<0.26537.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 649 [rebalance:debug,2014-08-19T16:51:07.589,ns_1@10.242.238.90:<0.26534.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:07.590,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26534.0> (ok) [rebalance:debug,2014-08-19T16:51:07.590,ns_1@10.242.238.90:<0.26537.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:07.590,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26537.0> (ok) [rebalance:debug,2014-08-19T16:51:07.698,ns_1@10.242.238.90:<0.26540.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 650 [rebalance:debug,2014-08-19T16:51:07.699,ns_1@10.242.238.90:<0.26541.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 651 [rebalance:debug,2014-08-19T16:51:07.700,ns_1@10.242.238.90:<0.26540.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:07.700,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26540.0> (ok) [rebalance:debug,2014-08-19T16:51:07.700,ns_1@10.242.238.90:<0.26541.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:07.700,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26541.0> (ok) [ns_server:debug,2014-08-19T16:51:07.804,ns_1@10.242.238.90:<0.26548.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 653) [ns_server:debug,2014-08-19T16:51:07.804,ns_1@10.242.238.90:<0.26548.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:07.804,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26547.0> (ok) [ns_server:debug,2014-08-19T16:51:07.804,ns_1@10.242.238.90:<0.26551.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: 
[] ("default", 659) [ns_server:debug,2014-08-19T16:51:07.804,ns_1@10.242.238.90:<0.26551.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:07.804,ns_1@10.242.238.90:<0.26555.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 655) [ns_server:debug,2014-08-19T16:51:07.804,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26549.0> (ok) [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.26555.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.25625.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26550.0> (ok) [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.26557.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 660) [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.25625.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.26557.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.26560.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.26561.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 658) [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.26560.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26552.0> (ok) [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.26561.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.26563.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 657) [rebalance:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.25446.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.26563.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.25560.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26553.0> (ok) [rebalance:info,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.25625.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:<0.26564.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 656) [ns_server:debug,2014-08-19T16:51:07.805,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26554.0> (ok) [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.25446.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.25560.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26565.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26564.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26567.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 654) [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26566.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.25433.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.25485.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26565.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26556.0> (ok) [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26567.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26566.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26568.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 652) [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.25433.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.25485.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26569.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26559.0> (ok) [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26570.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.25446.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26568.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:info,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.25560.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26571.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 661) [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26570.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26569.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.25510.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.26571.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:07.806,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26558.0> (ok) [rebalance:info,2014-08-19T16:51:07.806,ns_1@10.242.238.90:<0.25485.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.25535.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.25510.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.25433.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.26572.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26562.0> (ok) [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.26572.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.25535.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.26573.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.26573.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.25510.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.25599.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.25535.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.25650.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.25599.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.26574.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.25394.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.26574.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.25394.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.26576.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.25650.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:07.807,ns_1@10.242.238.90:<0.26575.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:07.808,ns_1@10.242.238.90:<0.26576.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:07.808,ns_1@10.242.238.90:<0.25599.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:07.808,ns_1@10.242.238.90:<0.26575.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:07.808,ns_1@10.242.238.90:<0.25650.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:07.808,ns_1@10.242.238.90:<0.25394.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
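The burst above is old replicators being torn down once their vbuckets have moved: each ebucketmigrator dies with reason shutdown, a helper sends an opaque message downstream, and the dying process waits for the matching "Got close ack!". When skimming a long rebalance log it can be useful to confirm that no shutdown was left waiting for its ack; a throwaway sketch (Python, assumed helper names) for that check:

import re
import sys

DYING_RE = re.compile(
    r":(<[\d.]+>):ebucketmigrator_srv:terminate:737\]Dying with reason: shutdown"
)
ACK_RE = re.compile(
    r":(<[\d.]+>):ebucketmigrator_srv:do_confirm_sent_messages:775\]Got close ack!"
)

def unacked_shutdowns(log_text):
    # Migrator pids that logged a shutdown but never logged the downstream close ack.
    return sorted(set(DYING_RE.findall(log_text)) - set(ACK_RE.findall(log_text)))

if __name__ == "__main__":
    missing = unacked_shutdowns(sys.stdin.read())
    if missing:
        print("shutdowns without a close ack: " + ", ".join(missing))
    else:
        print("every migrator shutdown was acknowledged downstream")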
[ns_server:info,2014-08-19T16:51:07.852,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 653 state to active [ns_server:info,2014-08-19T16:51:07.864,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 659 state to active [ns_server:info,2014-08-19T16:51:07.877,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 655 state to active [ns_server:debug,2014-08-19T16:51:07.889,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:07.891,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.891,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1450 us [ns_server:debug,2014-08-19T16:51:07.891,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.892,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{653, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:07.896,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 660 state to active [ns_server:info,2014-08-19T16:51:07.906,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 652 state to active [ns_server:debug,2014-08-19T16:51:07.915,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:51:07.916,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 661 state to active [ns_server:debug,2014-08-19T16:51:07.919,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3546 us [ns_server:debug,2014-08-19T16:51:07.919,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.919,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.920,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{659, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
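Throughout this stretch 'ns_1@10.242.238.88' keeps requesting full config synchronization after each map change, and ns_config_rep logs how long each sync took. A tiny sketch (Python) for pulling those timings out of a log like this, in case a rebalance looks like it is stalling on config replication:

import re
import sys

SYNC_RE = re.compile(r"ns_config_rep:handle_call:119\]Fully synchronized config in (\d+) us")

def config_sync_times_us(log_text):
    # Durations of the full config synchronizations, in microseconds, in log order.
    return [int(us) for us in SYNC_RE.findall(log_text)]

if __name__ == "__main__":
    times = config_sync_times_us(sys.stdin.read())
    if times:
        print("%d syncs, min %d us, max %d us" % (len(times), min(times), max(times)))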
[views:debug,2014-08-19T16:51:07.928,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/653. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:07.928,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",653,active,1} [ns_server:info,2014-08-19T16:51:07.931,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 654 state to active [ns_server:info,2014-08-19T16:51:07.947,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 658 state to active [ns_server:debug,2014-08-19T16:51:07.947,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:07.950,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2985 us [ns_server:debug,2014-08-19T16:51:07.950,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.951,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.951,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{655, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:07.962,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 657 state to active [ns_server:info,2014-08-19T16:51:07.968,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 656 state to active [ns_server:debug,2014-08-19T16:51:07.975,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:07.984,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8362 us [ns_server:debug,2014-08-19T16:51:07.984,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.984,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.985,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{660, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
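Each "Changed vbucket N state to active" from ns_memcached above is followed by a buckets config change whose map entry now lists this node ('ns_1@10.242.238.90') first for that vbucket, and by a matching mc_couch set_vbucket event once couch has been notified. To see at a glance which vbuckets this node reports as active or replica in a log like this, a minimal standalone sketch (Python):

import re
import sys
from collections import Counter

STATE_RE = re.compile(r":ns_memcached:do_handle_call:527\]Changed vbucket (\d+) state to (\w+)")

def vbucket_states(log_text):
    # Last state logged by ns_memcached for each vbucket mentioned in the log.
    last = {}
    for m in STATE_RE.finditer(log_text):
        last[int(m.group(1))] = m.group(2)
    return last

if __name__ == "__main__":
    states = vbucket_states(sys.stdin.read())
    print(Counter(states.values()))                 # e.g. Counter({'active': 10, 'replica': 2})
    print("active:", sorted(vb for vb, s in states.items() if s == "active"))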
[views:debug,2014-08-19T16:51:07.994,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/661. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:07.995,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",661,active,1} [ns_server:debug,2014-08-19T16:51:08.005,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.008,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.008,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3040 us [ns_server:debug,2014-08-19T16:51:08.008,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.011,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{652, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.034,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.037,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2738 us [ns_server:debug,2014-08-19T16:51:08.037,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.037,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.038,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{661, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:08.045,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/659. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.045,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",659,active,1} [ns_server:debug,2014-08-19T16:51:08.060,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.064,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3062 us [ns_server:debug,2014-08-19T16:51:08.064,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.064,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.065,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{654, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.090,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.094,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3549 us [ns_server:debug,2014-08-19T16:51:08.094,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.094,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:08.095,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/655. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.095,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",655,active,1} [ns_server:debug,2014-08-19T16:51:08.095,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{658, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.120,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.126,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.126,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3343 us [ns_server:debug,2014-08-19T16:51:08.126,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.127,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{657, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:08.145,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/657. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.145,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",657,active,1} [ns_server:debug,2014-08-19T16:51:08.156,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.164,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8098 us [ns_server:debug,2014-08-19T16:51:08.164,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.164,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.165,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{656, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:08.197,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/660. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.197,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",660,active,1} [views:debug,2014-08-19T16:51:08.231,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/658. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.231,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",658,active,1} [views:debug,2014-08-19T16:51:08.281,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/656. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.281,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",656,active,1} [views:debug,2014-08-19T16:51:08.315,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/654. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.315,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",654,active,1} [views:debug,2014-08-19T16:51:08.349,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/652. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.349,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",652,active,1} [ns_server:debug,2014-08-19T16:51:08.385,ns_1@10.242.238.90:<0.26598.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 642) [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:<0.26598.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:<0.26600.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 644) [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:<0.26600.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26595.0> (ok) [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:<0.26604.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 640) [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26596.0> (ok) [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:<0.26604.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:<0.26605.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 646) [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26597.0> (ok) [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:<0.26605.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:<0.26607.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 643) [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26599.0> (ok) [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:<0.26607.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:<0.26610.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 641) [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26603.0> (ok) [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:<0.26610.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:08.386,ns_1@10.242.238.90:<0.26612.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 645) [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26601.0> (ok) [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.26612.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.26613.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 649) 
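After the takeovers, capi_set_view_manager runs a do_wait_index_updated pass per vbucket; in this section the list of references to wait on is always empty, so every wait fires immediately ("All refs fired"). A short sketch (Python, helper name is mine) listing which bucket/vbucket pairs went through that no-op wait:

import re
import sys

IDLE_WAIT_RE = re.compile(
    r'do_wait_index_updated:618\]References to wait:\s*\[\]\s*\("([^"]+)", (\d+)\)'
)

def idle_index_waits(log_text):
    # (bucket, vbucket) pairs whose index-update wait had no outstanding references.
    return [(bucket, int(vb)) for bucket, vb in IDLE_WAIT_RE.findall(log_text)]

if __name__ == "__main__":
    for bucket, vb in idle_index_waits(sys.stdin.read()):
        print("%s/%d: no outstanding index refs" % (bucket, vb))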
[rebalance:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.25894.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.26613.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.25830.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.26614.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 647) [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26602.0> (ok) [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.25894.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.25930.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26606.0> (ok) [rebalance:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.25780.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.26616.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.26614.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.26616.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.25869.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.26617.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 650) [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26608.0> (ok) [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.25830.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.25780.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.25930.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.26618.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.26619.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.26620.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.25894.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.26617.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:51:08.387,ns_1@10.242.238.90:<0.25919.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.25869.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26622.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26620.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26619.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26618.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26621.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 648) [rebalance:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.25711.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.25805.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26609.0> (ok) [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26622.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26621.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.25919.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.25780.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26623.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 651) [rebalance:info,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.25930.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26624.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.25830.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26611.0> (ok) [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.25805.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.25711.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26625.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26623.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26626.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.25769.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.25869.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26624.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26625.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26626.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26615.0> (ok) [rebalance:info,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.25919.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.25769.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.90:<0.26627.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.25711.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.25805.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.25686.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.26627.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.25769.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.25686.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.25736.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.26628.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.25675.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.26628.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.25736.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.25686.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.26629.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.25675.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.26630.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.26629.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.25736.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.389,ns_1@10.242.238.90:<0.26630.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.390,ns_1@10.242.238.90:<0.25675.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
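The burst of "Dying with reason: shutdown" / "Sending opaque message" / "Got close ack!" entries above is the tap-stream shutdown handshake: each exiting ebucketmigrator hands the downstream connection an opaque marker and blocks until the matching ack comes back before it dies. A minimal sketch of that send-then-wait pattern, using plain Erlang message passing rather than the real memcached tap traffic (the module, function names and message shapes below are illustrative only, not the ebucketmigrator_srv API):

    -module(opaque_ack_sketch).
    -export([confirm_shutdown/1, downstream/0]).

    %% Illustrative stand-in for the confirm_sent_messages /
    %% do_confirm_sent_messages pair: push an opaque marker downstream,
    %% then block until it is acknowledged.
    confirm_shutdown(Downstream) ->
        Ref = make_ref(),
        Downstream ! {opaque, self(), Ref},   % "Sending opaque message to confirm downstream reception"
        receive
            {ack, Ref} -> ok                  % "Got close ack!"
        after 5000 ->
            {error, timeout}
        end.

    %% Illustrative downstream peer that simply echoes the ack.
    downstream() ->
        receive
            {opaque, From, Ref} ->
                From ! {ack, Ref},
                downstream()
        end.

    %% Usage: Down = spawn(fun opaque_ack_sketch:downstream/0),
    %%        ok = opaque_ack_sketch:confirm_shutdown(Down).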
[ns_server:debug,2014-08-19T16:51:08.509,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.512,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.513,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3209 us [ns_server:debug,2014-08-19T16:51:08.513,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.514,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{906, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.539,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.542,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2704 us [ns_server:debug,2014-08-19T16:51:08.542,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.542,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.543,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{907, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.565,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.569,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.569,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3191 us [ns_server:debug,2014-08-19T16:51:08.569,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.571,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{908, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, 
{uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:08.576,ns_1@10.242.238.90:<0.25875.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.576,ns_1@10.242.238.90:<0.25875.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.576,ns_1@10.242.238.90:<0.26636.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.576,ns_1@10.242.238.90:<0.26636.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.576,ns_1@10.242.238.90:<0.25875.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:08.597,ns_1@10.242.238.90:<0.25925.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.597,ns_1@10.242.238.90:<0.25925.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.597,ns_1@10.242.238.90:<0.26637.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.597,ns_1@10.242.238.90:<0.26637.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.597,ns_1@10.242.238.90:<0.25925.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:08.598,ns_1@10.242.238.90:<0.25681.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.598,ns_1@10.242.238.90:<0.25681.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.598,ns_1@10.242.238.90:<0.26638.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.598,ns_1@10.242.238.90:<0.26638.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.598,ns_1@10.242.238.90:<0.25681.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:08.599,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.602,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2985 us [ns_server:debug,2014-08-19T16:51:08.602,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.602,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.603,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{910, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.634,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.637,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.637,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2650 us [ns_server:debug,2014-08-19T16:51:08.637,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.638,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{912, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:08.660,ns_1@10.242.238.90:<0.25786.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.661,ns_1@10.242.238.90:<0.25786.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.661,ns_1@10.242.238.90:<0.26641.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.661,ns_1@10.242.238.90:<0.26641.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.661,ns_1@10.242.238.90:<0.25786.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
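The "config change: buckets" entries above print the bucket configuration as nested Erlang proplists of the shape [{configs,[{"default", Props}]}]. For reference, individual settings such as ram_quota can be read back with the standard proplists module; a minimal sketch assuming the term exactly as logged (bucket_cfg_sketch and ram_quota/2 are illustrative names, not part of ns_server):

    -module(bucket_cfg_sketch).
    -export([ram_quota/2]).

    %% Buckets has the shape printed by ns_config_log above:
    %%   [{configs,[{"default", Props}]}]
    ram_quota(Buckets, Name) ->
        Configs = proplists:get_value(configs, Buckets),
        Props   = proplists:get_value(Name, Configs),
        proplists:get_value(ram_quota, Props).   %% 13369344000 for "default" above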
[rebalance:debug,2014-08-19T16:51:08.662,ns_1@10.242.238.90:<0.25742.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.663,ns_1@10.242.238.90:<0.25742.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.663,ns_1@10.242.238.90:<0.26642.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.663,ns_1@10.242.238.90:<0.26642.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.663,ns_1@10.242.238.90:<0.25742.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.663,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:51:08.668,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 641 state to active [ns_server:debug,2014-08-19T16:51:08.670,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6617 us [ns_server:debug,2014-08-19T16:51:08.670,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.671,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.673,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{915, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:08.681,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 645 state to active [ns_server:info,2014-08-19T16:51:08.691,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 647 state to active [ns_server:debug,2014-08-19T16:51:08.699,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.700,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.700,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1151 us [views:debug,2014-08-19T16:51:08.701,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/641. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.701,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.701,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",641,active,1} [ns_server:debug,2014-08-19T16:51:08.701,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{913, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:08.715,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 643 state to active [rebalance:debug,2014-08-19T16:51:08.727,ns_1@10.242.238.90:<0.25825.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.727,ns_1@10.242.238.90:<0.25825.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.727,ns_1@10.242.238.90:<0.26645.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.727,ns_1@10.242.238.90:<0.26645.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.727,ns_1@10.242.238.90:<0.25825.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.732,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:51:08.732,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/647. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.732,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",647,active,1} [ns_server:debug,2014-08-19T16:51:08.735,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2804 us [ns_server:debug,2014-08-19T16:51:08.735,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.735,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.736,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{911, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:08.741,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 644 state to active [rebalance:debug,2014-08-19T16:51:08.761,ns_1@10.242.238.90:<0.25850.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.762,ns_1@10.242.238.90:<0.25850.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.762,ns_1@10.242.238.90:<0.26647.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.762,ns_1@10.242.238.90:<0.26647.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.762,ns_1@10.242.238.90:<0.25850.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
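The "Fully synchronized config in N us" figures above are elapsed times in microseconds. timer:tc reports in the same unit, so a comparable measurement around any synchronous call looks like the sketch below (sync_with/1 is a placeholder, not the real ns_config_rep synchronization call):

    -module(sync_timing_sketch).
    -export([timed_sync/1]).

    %% Placeholder for the synchronous work being timed.
    sync_with(_Node) -> ok.

    timed_sync(Node) ->
        %% timer:tc/1 returns {ElapsedMicroseconds, Result}, the same unit as
        %% the "Fully synchronized config in ... us" entries above.
        timer:tc(fun() -> sync_with(Node) end).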
[ns_server:debug,2014-08-19T16:51:08.763,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.766,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1642 us [ns_server:debug,2014-08-19T16:51:08.766,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.767,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.767,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{914, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:08.769,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 387 state to replica [ns_server:info,2014-08-19T16:51:08.770,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [387,396,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425,426] ([387], []) [ns_server:debug,2014-08-19T16:51:08.771,ns_1@10.242.238.90:<0.26648.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [387,396,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.38399>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[387,396,398,399,400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:08.771,ns_1@10.242.238.90:<0.26648.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26513.0> [ns_server:info,2014-08-19T16:51:08.772,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [views:debug,2014-08-19T16:51:08.783,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/645. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.783,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",645,active,1} [ns_server:info,2014-08-19T16:51:08.786,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{387,1}, {396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:08.787,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:08.787,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:08.787,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:08.787,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:08.787,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:08.787,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.787,ns_1@10.242.238.90:<0.26651.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.787,ns_1@10.242.238.90:<0.26651.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.788,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.788,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:08.788,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:08.788,ns_1@10.242.238.90:<0.26513.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:08.788,ns_1@10.242.238.90:<0.26648.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26513.0> [ns_server:debug,2014-08-19T16:51:08.788,ns_1@10.242.238.90:<0.26648.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:08.789,ns_1@10.242.238.90:<0.26653.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:08.789,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26513.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26514.0>,<<"cut off">>,<<"cut off">>,[],94,false,false,0, {1408,452668,787361}, completed, {<0.26648.0>,#Ref<0.0.1.38412>}, <<"replication_ns_1@10.242.238.90">>,<0.26513.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:08.789,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26648.0>,{#Ref<0.0.1.38401>,<0.26653.0>}} [rebalance:debug,2014-08-19T16:51:08.789,ns_1@10.242.238.90:<0.25900.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [error_logger:info,2014-08-19T16:51:08.789,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26653.0>}, {name, {new_child_id, [387,396,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [387,396,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:08.789,ns_1@10.242.238.90:<0.26654.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.789,ns_1@10.242.238.90:<0.25900.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.790,ns_1@10.242.238.90:<0.26654.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.790,ns_1@10.242.238.90:<0.25900.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
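The progress report above spells out the child specification 'ns_vbm_new_sup-default' used for the replacement migrator: a temporary worker with a 60000 ms shutdown and ebucketmigrator_srv:start_link as the start MFA. In modern OTP map form the equivalent spec would read roughly as below (a sketch; Id and Args stand for the long id tuple and argument list that the report prints in full):

    -module(child_spec_sketch).
    -export([child_spec/2]).

    %% Args is the [SrcHostPort, DstHostPort, Opts] list logged under mfargs.
    child_spec(Id, Args) ->
        #{id       => Id,
          start    => {ebucketmigrator_srv, start_link, Args},
          restart  => temporary,
          shutdown => 60000,
          type     => worker}.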
[ns_server:debug,2014-08-19T16:51:08.794,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.797,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3089 us [ns_server:debug,2014-08-19T16:51:08.797,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.798,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.798,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{387, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.799,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[387,396,398,399,400,401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:08.799,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26655.0> [rebalance:debug,2014-08-19T16:51:08.800,ns_1@10.242.238.90:<0.25706.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.800,ns_1@10.242.238.90:<0.25706.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.800,ns_1@10.242.238.90:<0.26656.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.800,ns_1@10.242.238.90:<0.26656.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.801,ns_1@10.242.238.90:<0.25706.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:51:08.805,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 385 state to replica [ns_server:info,2014-08-19T16:51:08.805,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [385,387,396,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424,425,426] ([385], []) [ns_server:debug,2014-08-19T16:51:08.806,ns_1@10.242.238.90:<0.26657.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [385,387,396,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.38591>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[385,387,396,398,399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420,421,422,423,424, 425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:08.806,ns_1@10.242.238.90:<0.26657.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26653.0> [ns_server:info,2014-08-19T16:51:08.807,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:08.817,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{385,1}, {387,1}, {396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:08.818,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:08.819,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:08.819,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:51:08.819,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:08.819,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:08.819,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.819,ns_1@10.242.238.90:<0.26660.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.819,ns_1@10.242.238.90:<0.26660.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.819,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.819,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:08.820,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:08.820,ns_1@10.242.238.90:<0.26653.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:08.820,ns_1@10.242.238.90:<0.26657.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26653.0> [ns_server:debug,2014-08-19T16:51:08.820,ns_1@10.242.238.90:<0.26657.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:08.820,ns_1@10.242.238.90:<0.26662.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:08.820,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26653.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26655.0>,<<"cut off">>,<<"cut off">>,[],97,false,false,0, {1408,452668,819004}, completed, {<0.26657.0>,#Ref<0.0.1.38604>}, <<"replication_ns_1@10.242.238.90">>,<0.26653.0>, {had_backfill,false,undefined,[]}, completed,false}. 
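In the filter-change entries above the new tap filter is printed as {VBucket, 1} pairs built from the vbucket list in the preceding "Going to change replication ... to have [...]" entry. Producing that pair list is a one-line comprehension; a sketch (the constant 1 simply mirrors what the log prints, it is not a documented flag):

    -module(vb_filter_sketch).
    -export([to_filter/1]).

    %% [385,387,395,396,...] -> [{385,1},{387,1},{395,1},{396,1},...]
    %% as shown in the "Changing vbucket filter on tap stream" entries.
    to_filter(VBuckets) ->
        [{V, 1} || V <- lists:sort(VBuckets)].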
[ns_server:debug,2014-08-19T16:51:08.821,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26657.0>,{#Ref<0.0.1.38593>,<0.26662.0>}} [error_logger:info,2014-08-19T16:51:08.821,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26662.0>}, {name, {new_child_id, [385,387,396,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [385,387,396,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424, 425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:08.826,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.829,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.829,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2685 us [ns_server:debug,2014-08-19T16:51:08.829,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.830,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{385, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.831,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[385,387,396,398,399,400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:08.831,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26663.0> [views:debug,2014-08-19T16:51:08.834,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/643. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.834,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",643,active,1} [ns_server:info,2014-08-19T16:51:08.837,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 395 state to replica [ns_server:info,2014-08-19T16:51:08.838,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [385,387,395,396,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([395], []) [ns_server:debug,2014-08-19T16:51:08.839,ns_1@10.242.238.90:<0.26664.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [385,387,395,396,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.38758>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[385,387,395,396,398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:08.839,ns_1@10.242.238.90:<0.26664.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26662.0> [ns_server:info,2014-08-19T16:51:08.839,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:08.848,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{385,1}, {387,1}, {395,1}, {396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:08.849,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:08.849,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:08.849,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:51:08.849,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:08.849,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:08.850,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.850,ns_1@10.242.238.90:<0.26667.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.850,ns_1@10.242.238.90:<0.26667.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.850,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.850,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:08.850,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:08.850,ns_1@10.242.238.90:<0.26662.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:08.850,ns_1@10.242.238.90:<0.26664.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26662.0> [ns_server:debug,2014-08-19T16:51:08.851,ns_1@10.242.238.90:<0.26664.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:08.851,ns_1@10.242.238.90:<0.26669.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:08.851,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26662.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26663.0>,<<"cut off">>,<<"cut off">>,[],100,false,false,0, {1408,452668,849656}, completed, {<0.26664.0>,#Ref<0.0.1.38771>}, <<"replication_ns_1@10.242.238.90">>,<0.26662.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[error_logger:info,2014-08-19T16:51:08.851,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26669.0>}, {name, {new_child_id, [385,387,395,396,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [385,387,395,396,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:08.851,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26664.0>,{#Ref<0.0.1.38760>,<0.26669.0>}} [ns_server:info,2014-08-19T16:51:08.852,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 640 state to active [ns_server:info,2014-08-19T16:51:08.853,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 649 state to active [ns_server:debug,2014-08-19T16:51:08.856,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.860,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3628 us [ns_server:debug,2014-08-19T16:51:08.860,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.861,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.861,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{395, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.862,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[385,387,395,396,398,399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:08.862,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26670.0> [rebalance:debug,2014-08-19T16:51:08.864,ns_1@10.242.238.90:<0.25936.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.865,ns_1@10.242.238.90:<0.25936.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack 
[ns_server:debug,2014-08-19T16:51:08.865,ns_1@10.242.238.90:<0.26671.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.865,ns_1@10.242.238.90:<0.26671.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.865,ns_1@10.242.238.90:<0.25936.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:51:08.884,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/644. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.884,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",644,active,1} [ns_server:info,2014-08-19T16:51:08.887,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 651 state to active [rebalance:debug,2014-08-19T16:51:08.892,ns_1@10.242.238.90:<0.25631.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.892,ns_1@10.242.238.90:<0.25631.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.893,ns_1@10.242.238.90:<0.26673.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.893,ns_1@10.242.238.90:<0.26673.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.893,ns_1@10.242.238.90:<0.25631.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.896,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.899,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.899,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2827 us [ns_server:debug,2014-08-19T16:51:08.899,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.900,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{904, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:08.900,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 646 state to active [views:debug,2014-08-19T16:51:08.918,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/640. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.918,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",640,active,1} [ns_server:debug,2014-08-19T16:51:08.925,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:51:08.925,ns_1@10.242.238.90:<0.25775.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.926,ns_1@10.242.238.90:<0.25775.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.926,ns_1@10.242.238.90:<0.26675.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.926,ns_1@10.242.238.90:<0.26675.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.926,ns_1@10.242.238.90:<0.25775.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.928,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3200 us [ns_server:debug,2014-08-19T16:51:08.928,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.929,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.929,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{899, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:08.936,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 648 state to active [ns_server:info,2014-08-19T16:51:08.951,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 642 state to active [ns_server:debug,2014-08-19T16:51:08.956,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:08.959,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.961,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4434 us [ns_server:debug,2014-08-19T16:51:08.961,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.961,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{905, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, 
{sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:08.963,ns_1@10.242.238.90:<0.25717.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:08.963,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 390 state to replica [ns_server:debug,2014-08-19T16:51:08.964,ns_1@10.242.238.90:<0.25717.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:info,2014-08-19T16:51:08.964,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [385,387,390,395,396,398,399,400,401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([390], []) [ns_server:debug,2014-08-19T16:51:08.964,ns_1@10.242.238.90:<0.26677.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.964,ns_1@10.242.238.90:<0.26677.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.964,ns_1@10.242.238.90:<0.25717.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:08.965,ns_1@10.242.238.90:<0.26678.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [385,387,390,395,396,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425, 426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.39152>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[385,387,390,395,396,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:08.965,ns_1@10.242.238.90:<0.26678.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26669.0> [ns_server:info,2014-08-19T16:51:08.965,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:08.974,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{385,1}, {387,1}, {390,1}, {395,1}, {396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:08.975,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:08.975,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:51:08.975,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:08.976,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:08.976,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:08.976,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:08.976,ns_1@10.242.238.90:<0.26680.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:08.976,ns_1@10.242.238.90:<0.26680.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:08.976,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:51:08.976,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/651. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:08.976,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:08.977,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",651,active,1} [ns_server:debug,2014-08-19T16:51:08.977,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:08.977,ns_1@10.242.238.90:<0.26669.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:08.977,ns_1@10.242.238.90:<0.26678.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26669.0> [ns_server:debug,2014-08-19T16:51:08.977,ns_1@10.242.238.90:<0.26678.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:08.978,ns_1@10.242.238.90:<0.26682.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:08.978,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26669.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26670.0>,<<"cut off">>,<<"cut off">>,[],103,false,false,0, {1408,452668,975816}, completed, {<0.26678.0>,#Ref<0.0.1.39165>}, <<"replication_ns_1@10.242.238.90">>,<0.26669.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:08.978,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26678.0>,{#Ref<0.0.1.39154>,<0.26682.0>}} [error_logger:info,2014-08-19T16:51:08.978,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26682.0>}, {name, {new_child_id, [385,387,390,395,396,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425, 426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [385,387,390,395,396,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:08.984,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:51:08.986,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 650 state to active [ns_server:debug,2014-08-19T16:51:08.987,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2949 us [ns_server:debug,2014-08-19T16:51:08.987,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.988,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.988,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{390, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.989,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[385,387,390,395,396,398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424, 425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:08.989,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26684.0> [ns_server:info,2014-08-19T16:51:08.994,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 392 state to replica [ns_server:info,2014-08-19T16:51:08.994,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [385,387,390,392,395,396,398,399,400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([392], 
[]) [ns_server:debug,2014-08-19T16:51:08.995,ns_1@10.242.238.90:<0.26685.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [385,387,390,392,395,396,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424, 425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.39325>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[385,387,390,392,395,396,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:08.996,ns_1@10.242.238.90:<0.26685.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26682.0> [ns_server:info,2014-08-19T16:51:08.996,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:09.005,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{385,1}, {387,1}, {390,1}, {392,1}, {395,1}, {396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:09.006,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:09.006,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:09.006,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:09.006,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:09.006,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:09.006,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:09.007,ns_1@10.242.238.90:<0.26687.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:09.007,ns_1@10.242.238.90:<0.26687.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:09.007,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:09.007,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:09.007,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:09.007,ns_1@10.242.238.90:<0.26682.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:09.007,ns_1@10.242.238.90:<0.26685.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26682.0> [ns_server:debug,2014-08-19T16:51:09.008,ns_1@10.242.238.90:<0.26685.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:09.008,ns_1@10.242.238.90:<0.26689.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:09.008,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26682.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26684.0>,<<"cut off">>,<<"cut off">>,[],106,false,false,0, {1408,452669,6586}, completed, {<0.26685.0>,#Ref<0.0.1.39338>}, <<"replication_ns_1@10.242.238.90">>,<0.26682.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:09.008,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26685.0>,{#Ref<0.0.1.39327>,<0.26689.0>}} [error_logger:info,2014-08-19T16:51:09.008,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26689.0>}, {name, {new_child_id, [385,387,390,392,395,396,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424, 425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [385,387,390,392,395,396,398,399,400,401, 402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [views:debug,2014-08-19T16:51:09.010,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/649. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:09.010,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",649,active,1} [ns_server:debug,2014-08-19T16:51:09.014,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.019,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4883 us [ns_server:debug,2014-08-19T16:51:09.019,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.019,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.020,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{392, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.021,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[385,387,390,392,395,396,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:09.021,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26691.0> [ns_server:debug,2014-08-19T16:51:09.051,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.059,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.059,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7402 us [ns_server:debug,2014-08-19T16:51:09.059,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.060,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{641, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.082,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[views:debug,2014-08-19T16:51:09.085,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/650. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:09.085,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",650,active,1} [ns_server:debug,2014-08-19T16:51:09.085,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2972 us [ns_server:debug,2014-08-19T16:51:09.086,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.086,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.087,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{645, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.107,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.110,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2622 us [ns_server:debug,2014-08-19T16:51:09.110,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.111,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.111,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{647, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.136,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.139,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.140,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3460 us [ns_server:debug,2014-08-19T16:51:09.140,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:09.141,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{643, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:09.143,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 389 state to replica [ns_server:info,2014-08-19T16:51:09.143,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [385,387,389,390,392,395,396,398,399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([389], []) [ns_server:debug,2014-08-19T16:51:09.144,ns_1@10.242.238.90:<0.26695.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [385,387,389,390,392,395,396,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423, 424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.39629>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[385,387,389,390,392,395,396,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:09.144,ns_1@10.242.238.90:<0.26695.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26689.0> [ns_server:info,2014-08-19T16:51:09.144,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:09.154,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{385,1}, {387,1}, {389,1}, {390,1}, {392,1}, {395,1}, {396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [views:debug,2014-08-19T16:51:09.154,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/648. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:09.154,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",648,active,1} [ns_server:info,2014-08-19T16:51:09.155,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:09.155,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:51:09.155,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:09.155,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:09.155,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:09.155,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:09.155,ns_1@10.242.238.90:<0.26697.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:09.155,ns_1@10.242.238.90:<0.26697.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:09.156,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:09.156,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:09.156,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:09.156,ns_1@10.242.238.90:<0.26689.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:09.156,ns_1@10.242.238.90:<0.26695.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26689.0> [ns_server:debug,2014-08-19T16:51:09.156,ns_1@10.242.238.90:<0.26695.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:09.156,ns_1@10.242.238.90:<0.26699.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:09.157,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26689.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26691.0>,<<"cut off">>,<<"cut off">>,[],109,false,false,0, {1408,452669,155279}, completed, {<0.26695.0>,#Ref<0.0.1.39642>}, <<"replication_ns_1@10.242.238.90">>,<0.26689.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:09.157,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26695.0>,{#Ref<0.0.1.39631>,<0.26699.0>}} [error_logger:info,2014-08-19T16:51:09.157,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26699.0>}, {name, {new_child_id, [385,387,389,390,392,395,396,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423, 424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [385,387,389,390,392,395,396,398,399,400, 401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:09.164,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.166,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.166,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2131 us [ns_server:debug,2014-08-19T16:51:09.166,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.167,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{389, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.169,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[385,387,389,390,392,395,396,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:09.169,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26701.0> [ns_server:debug,2014-08-19T16:51:09.211,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.214,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.214,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2799 us 
[ns_server:debug,2014-08-19T16:51:09.215,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.215,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{902, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:09.221,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/646. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:09.221,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",646,active,1} [ns_server:debug,2014-08-19T16:51:09.238,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.245,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7544 us [ns_server:debug,2014-08-19T16:51:09.245,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.246,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.247,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{644, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:09.250,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 388 state to replica [ns_server:info,2014-08-19T16:51:09.250,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [385,387,388,389,390,392,395,396,398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([388], []) [ns_server:debug,2014-08-19T16:51:09.251,ns_1@10.242.238.90:<0.26705.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [385,387,388,389,390,392,395,396,398,399,400, 401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.39867>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, 
{vbuckets,[385,387,388,389,390,392,395,396,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:09.252,ns_1@10.242.238.90:<0.26705.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26699.0> [ns_server:info,2014-08-19T16:51:09.252,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:09.261,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{385,1}, {387,1}, {388,1}, {389,1}, {390,1}, {392,1}, {395,1}, {396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:09.262,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:09.262,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:09.262,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:09.262,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:09.262,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:09.263,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:09.263,ns_1@10.242.238.90:<0.26707.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:09.263,ns_1@10.242.238.90:<0.26707.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:09.263,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:09.263,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:09.263,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:09.263,ns_1@10.242.238.90:<0.26699.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:51:09.263,ns_1@10.242.238.90:<0.26705.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26699.0> [ns_server:debug,2014-08-19T16:51:09.264,ns_1@10.242.238.90:<0.26705.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:09.264,ns_1@10.242.238.90:<0.26709.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:09.264,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26699.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26701.0>,<<"cut off">>,<<"cut off">>,[],112,false,false,0, {1408,452669,262593}, completed, {<0.26705.0>,#Ref<0.0.1.39880>}, <<"replication_ns_1@10.242.238.90">>,<0.26699.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:09.264,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26705.0>,{#Ref<0.0.1.39869>,<0.26709.0>}} [error_logger:info,2014-08-19T16:51:09.264,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26709.0>}, {name, {new_child_id, [385,387,388,389,390,392,395,396,398,399,400, 401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [385,387,388,389,390,392,395,396,398,399, 400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:09.269,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:51:09.271,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/642. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:09.271,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",642,active,1} [ns_server:debug,2014-08-19T16:51:09.273,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.273,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3398 us [ns_server:debug,2014-08-19T16:51:09.273,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.274,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{388, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.277,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[385,387,388,389,390,392,395,396,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:09.277,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26710.0> [ns_server:debug,2014-08-19T16:51:09.303,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.306,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2636 us [ns_server:debug,2014-08-19T16:51:09.306,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.307,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.307,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{909, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.341,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.343,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:09.343,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1672 us [ns_server:debug,2014-08-19T16:51:09.344,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.344,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{900, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:09.347,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 386 state to replica [ns_server:info,2014-08-19T16:51:09.347,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [385,386,387,388,389,390,392,395,396,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([386], []) [ns_server:debug,2014-08-19T16:51:09.348,ns_1@10.242.238.90:<0.26714.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [385,386,387,388,389,390,392,395,396,398,399, 400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.40084>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[385,386,387,388,389,390,392,395,396,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:09.348,ns_1@10.242.238.90:<0.26714.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26709.0> [ns_server:info,2014-08-19T16:51:09.348,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:09.358,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {392,1}, {395,1}, {396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:09.359,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:09.359,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:51:09.359,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:09.359,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:09.359,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:09.359,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:09.359,ns_1@10.242.238.90:<0.26716.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:09.360,ns_1@10.242.238.90:<0.26716.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:09.360,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:09.360,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:09.360,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:09.360,ns_1@10.242.238.90:<0.26709.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:09.360,ns_1@10.242.238.90:<0.26714.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26709.0> [ns_server:debug,2014-08-19T16:51:09.360,ns_1@10.242.238.90:<0.26714.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:09.361,ns_1@10.242.238.90:<0.26718.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:09.361,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26709.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26710.0>,<<"cut off">>,<<"cut off">>,[],115,false,false,0, {1408,452669,359446}, completed, {<0.26714.0>,#Ref<0.0.1.40097>}, <<"replication_ns_1@10.242.238.90">>,<0.26709.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:09.361,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26714.0>,{#Ref<0.0.1.40086>,<0.26718.0>}} [error_logger:info,2014-08-19T16:51:09.361,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26718.0>}, {name, {new_child_id, [385,386,387,388,389,390,392,395,396,398,399, 400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [385,386,387,388,389,390,392,395,396,398, 399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:09.366,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.374,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6936 us [ns_server:debug,2014-08-19T16:51:09.374,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[385,386,387,388,389,390,392,395,396,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:debug,2014-08-19T16:51:09.374,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:09.374,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26725.0> [ns_server:debug,2014-08-19T16:51:09.374,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.375,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{386, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:09.377,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 394 state to replica [ns_server:info,2014-08-19T16:51:09.377,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [385,386,387,388,389,390,392,394,395,396,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426] ([394], []) 
[ns_server:debug,2014-08-19T16:51:09.379,ns_1@10.242.238.90:<0.26727.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [385,386,387,388,389,390,392,394,395,396,398, 399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.40223>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[385,386,387,388,389,390,392,394,395,396,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:09.379,ns_1@10.242.238.90:<0.26727.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26718.0> [ns_server:info,2014-08-19T16:51:09.380,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:09.389,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {392,1}, {394,1}, {395,1}, {396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:09.390,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:09.390,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:09.390,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:09.390,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:09.390,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:09.390,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:09.390,ns_1@10.242.238.90:<0.26729.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:09.390,ns_1@10.242.238.90:<0.26729.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:09.391,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:09.391,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:09.391,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:09.391,ns_1@10.242.238.90:<0.26718.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:09.391,ns_1@10.242.238.90:<0.26727.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26718.0> [ns_server:debug,2014-08-19T16:51:09.391,ns_1@10.242.238.90:<0.26727.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:09.391,ns_1@10.242.238.90:<0.26731.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:09.391,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26718.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26725.0>,<<"cut off">>,<<"cut off">>,[],118,false,false,0, {1408,452669,390261}, completed, {<0.26727.0>,#Ref<0.0.1.40236>}, <<"replication_ns_1@10.242.238.90">>,<0.26718.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:09.392,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26727.0>,{#Ref<0.0.1.40225>,<0.26731.0>}} [error_logger:info,2014-08-19T16:51:09.392,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26731.0>}, {name, {new_child_id, [385,386,387,388,389,390,392,394,395,396,398, 399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [385,386,387,388,389,390,392,394,395,396, 398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:09.396,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.399,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2852 us [ns_server:debug,2014-08-19T16:51:09.399,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.400,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.400,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{394, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, 
{uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.404,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[385,386,387,388,389,390,392,394,395,396,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:09.404,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26732.0> [ns_server:debug,2014-08-19T16:51:09.433,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.441,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.441,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8362 us [ns_server:debug,2014-08-19T16:51:09.442,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.442,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{896, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.470,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.473,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.473,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3578 us [ns_server:debug,2014-08-19T16:51:09.474,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.474,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{897, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
[ns_server:debug,2014-08-19T16:51:09.498,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.500,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2517 us [ns_server:debug,2014-08-19T16:51:09.500,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.501,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.501,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{640, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.526,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.529,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3018 us [ns_server:debug,2014-08-19T16:51:09.529,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.529,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.530,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{649, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:09.535,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 384 state to replica [ns_server:info,2014-08-19T16:51:09.535,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [384,385,386,387,388,389,390,392,394,395,396,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424, 425,426] ([384], []) [ns_server:debug,2014-08-19T16:51:09.536,ns_1@10.242.238.90:<0.26738.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [384,385,386,387,388,389,390,392,394,395,396, 398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.40512>} Args:[{"10.242.238.89",11209}, 
{"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[384,385,386,387,388,389,390,392,394,395,396,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:09.536,ns_1@10.242.238.90:<0.26738.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26731.0> [ns_server:info,2014-08-19T16:51:09.536,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:09.546,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {392,1}, {394,1}, {395,1}, {396,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:09.547,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:09.547,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:09.547,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:09.548,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:09.548,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:09.548,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:09.548,ns_1@10.242.238.90:<0.26740.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:09.548,ns_1@10.242.238.90:<0.26740.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:09.548,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:09.548,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:09.548,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:09.548,ns_1@10.242.238.90:<0.26731.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:51:09.548,ns_1@10.242.238.90:<0.26738.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26731.0> [ns_server:debug,2014-08-19T16:51:09.549,ns_1@10.242.238.90:<0.26738.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:09.549,ns_1@10.242.238.90:<0.26742.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:09.549,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26731.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26732.0>,<<"cut off">>,<<"cut off">>,[],121,false,false,0, {1408,452669,547735}, completed, {<0.26738.0>,#Ref<0.0.1.40525>}, <<"replication_ns_1@10.242.238.90">>,<0.26731.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:09.549,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26738.0>,{#Ref<0.0.1.40514>,<0.26742.0>}} [error_logger:info,2014-08-19T16:51:09.549,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26742.0>}, {name, {new_child_id, [384,385,386,387,388,389,390,392,394,395,396, 398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [384,385,386,387,388,389,390,392,394,395, 396,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:09.556,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.557,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1554 us [ns_server:debug,2014-08-19T16:51:09.557,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.558,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.558,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{384, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
[ns_server:debug,2014-08-19T16:51:09.560,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[384,385,386,387,388,389,390,392,394,395,396,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:09.560,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26744.0> [ns_server:debug,2014-08-19T16:51:09.591,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.593,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.593,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2753 us [ns_server:debug,2014-08-19T16:51:09.594,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.595,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{651, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:09.599,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 397 state to replica [ns_server:info,2014-08-19T16:51:09.599,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [384,385,386,387,388,389,390,392,394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423, 424,425,426] ([397], []) [ns_server:debug,2014-08-19T16:51:09.600,ns_1@10.242.238.90:<0.26746.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [384,385,386,387,388,389,390,392,394,395,396, 397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.40679>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[384,385,386,387,388,389,390,392,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:09.601,ns_1@10.242.238.90:<0.26746.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26742.0> [ns_server:info,2014-08-19T16:51:09.601,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` 
[ns_server:info,2014-08-19T16:51:09.611,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {392,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:09.612,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:09.612,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:09.613,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:09.613,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:09.613,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:09.613,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:09.613,ns_1@10.242.238.90:<0.26748.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:09.613,ns_1@10.242.238.90:<0.26748.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:09.613,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:09.613,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:09.614,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:09.614,ns_1@10.242.238.90:<0.26742.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:09.614,ns_1@10.242.238.90:<0.26746.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26742.0> [ns_server:debug,2014-08-19T16:51:09.614,ns_1@10.242.238.90:<0.26746.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:09.614,ns_1@10.242.238.90:<0.26750.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:09.614,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26742.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26744.0>,<<"cut off">>,<<"cut off">>,[],124,false,false,0, {1408,452669,612933}, completed, {<0.26746.0>,#Ref<0.0.1.40692>}, <<"replication_ns_1@10.242.238.90">>,<0.26742.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:09.615,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26746.0>,{#Ref<0.0.1.40681>,<0.26750.0>}} [error_logger:info,2014-08-19T16:51:09.615,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26750.0>}, {name, {new_child_id, [384,385,386,387,388,389,390,392,394,395,396, 397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [384,385,386,387,388,389,390,392,394,395, 396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:09.619,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.622,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3485 us [ns_server:debug,2014-08-19T16:51:09.622,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.623,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.623,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{397, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.625,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[384,385,386,387,388,389,390,392,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:09.625,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26752.0> [ns_server:debug,2014-08-19T16:51:09.651,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.654,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.654,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2992 us 
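ns_config_rep logs the duration of every full synchronization it serves (roughly 1.5 ms to 8.4 ms in this section). A quick way to summarize those timings across a log file fed on standard input, assuming only the message format shown above:

import re
import statistics
import sys

SYNC = re.compile(r"Fully synchronized config in (\d+) us")

def sync_times_us(log_text):
    """Return every logged full-synchronization duration, in microseconds."""
    return [int(us) for us in SYNC.findall(log_text)]

times = sync_times_us(sys.stdin.read())
if times:
    print(f"{len(times)} syncs, median {statistics.median(times)} us, max {max(times)} us")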
[ns_server:debug,2014-08-19T16:51:09.655,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.655,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{646, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.689,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.692,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.692,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3143 us [ns_server:debug,2014-08-19T16:51:09.693,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.693,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{898, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:09.695,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 391 state to replica [ns_server:info,2014-08-19T16:51:09.695,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [384,385,386,387,388,389,390,391,392,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426] ([391], []) [ns_server:debug,2014-08-19T16:51:09.696,ns_1@10.242.238.90:<0.26755.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [384,385,386,387,388,389,390,391,392,394,395, 396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.40872>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[384,385,386,387,388,389,390,391,392,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] 
[ns_server:debug,2014-08-19T16:51:09.697,ns_1@10.242.238.90:<0.26755.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26750.0> [ns_server:info,2014-08-19T16:51:09.697,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:09.708,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:09.709,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:09.709,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:09.709,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:09.710,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:09.710,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:09.710,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:09.710,ns_1@10.242.238.90:<0.26757.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:09.710,ns_1@10.242.238.90:<0.26757.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:09.710,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:09.710,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:09.710,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:09.710,ns_1@10.242.238.90:<0.26750.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:09.711,ns_1@10.242.238.90:<0.26755.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26750.0> [ns_server:debug,2014-08-19T16:51:09.711,ns_1@10.242.238.90:<0.26755.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:09.711,ns_1@10.242.238.90:<0.26759.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:09.711,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26750.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26752.0>,<<"cut off">>,<<"cut off">>,[],127,false,false,0, {1408,452669,709755}, completed, {<0.26755.0>,#Ref<0.0.1.40885>}, <<"replication_ns_1@10.242.238.90">>,<0.26750.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:09.712,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26755.0>,{#Ref<0.0.1.40874>,<0.26759.0>}} [error_logger:info,2014-08-19T16:51:09.712,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26759.0>}, {name, {new_child_id, [384,385,386,387,388,389,390,391,392,394,395, 396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [384,385,386,387,388,389,390,391,392,394, 395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424, 425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:09.716,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.719,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.719,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2781 us [ns_server:debug,2014-08-19T16:51:09.719,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.719,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{391, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.722,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[384,385,386,387,388,389,390,391,392,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:09.722,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26760.0> 
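Each filter-change handoff spans several processes: the coordinator registered under the new child id, the old ebucketmigrator that silences its upstream and passes its state, and the new one that reuses the upstream connection. Reading one cycle end to end is easiest if the log is grouped by the Erlang pid that appears in every entry's prefix; a small sketch (the pid below is just the new migrator from the cycle above):

import re
import sys
from collections import defaultdict

# Entry prefixes look like "...node:name<X.Y.Z>:module:function:line]", so the
# first "<X.Y.Z>" token on a line identifies the emitting process.
PID = re.compile(r"<\d+\.\d+\.\d+>")

def lines_by_pid(log_lines):
    grouped = defaultdict(list)
    for line in log_lines:
        match = PID.search(line)
        if match:
            grouped[match.group(0)].append(line.rstrip())
    return grouped

if __name__ == "__main__":
    grouped = lines_by_pid(sys.stdin)
    for line in grouped.get("<0.26759.0>", []):
        print(line)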
[ns_server:debug,2014-08-19T16:51:09.751,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.754,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2838 us [ns_server:debug,2014-08-19T16:51:09.754,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.754,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.755,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{648, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.781,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.784,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.784,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2736 us [ns_server:debug,2014-08-19T16:51:09.784,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.785,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{642, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:09.790,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 393 state to replica [ns_server:info,2014-08-19T16:51:09.790,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426] ([393], []) [ns_server:debug,2014-08-19T16:51:09.791,ns_1@10.242.238.90:<0.26763.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.41070>} 
Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[384,385,386,387,388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:09.792,ns_1@10.242.238.90:<0.26763.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26759.0> [ns_server:info,2014-08-19T16:51:09.792,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:09.803,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:09.804,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:09.804,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:09.804,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:09.804,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:09.804,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:09.805,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:09.805,ns_1@10.242.238.90:<0.26766.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:09.805,ns_1@10.242.238.90:<0.26766.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:09.805,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:09.805,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:09.805,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:09.805,ns_1@10.242.238.90:<0.26759.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:51:09.805,ns_1@10.242.238.90:<0.26763.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26759.0> [ns_server:debug,2014-08-19T16:51:09.806,ns_1@10.242.238.90:<0.26763.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:09.806,ns_1@10.242.238.90:<0.26768.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:09.806,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26759.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26760.0>,<<"cut off">>,<<"cut off">>,[],130,false,false,0, {1408,452669,804574}, completed, {<0.26763.0>,#Ref<0.0.1.41083>}, <<"replication_ns_1@10.242.238.90">>,<0.26759.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:09.806,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.26763.0>,{#Ref<0.0.1.41072>,<0.26768.0>}} [error_logger:info,2014-08-19T16:51:09.806,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.26768.0>}, {name, {new_child_id, [384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:09.811,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.817,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:09.817,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26769.0> [ns_server:debug,2014-08-19T16:51:09.818,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6341 us [ns_server:debug,2014-08-19T16:51:09.818,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.819,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.819,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{393, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.844,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.849,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.849,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5107 us [ns_server:debug,2014-08-19T16:51:09.849,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.850,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{901, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.878,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.881,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2636 us [ns_server:debug,2014-08-19T16:51:09.881,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.882,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.882,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{650, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.885,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:debug,2014-08-19T16:51:09.886,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:compact_next_bucket:1453]Going to spawn bucket compaction with forced view compaction for bucket default 
[ns_server:debug,2014-08-19T16:51:09.886,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:compact_next_bucket:1482]Spawned 'uninhibited' compaction for default [ns_server:info,2014-08-19T16:51:09.887,ns_1@10.242.238.90:<0.26772.0>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning up indexes for bucket `default` [ns_server:info,2014-08-19T16:51:09.888,ns_1@10.242.238.90:<0.26772.0>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [forced_previously_inhibited_view_compaction, {database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:51:09.892,ns_1@10.242.238.90:<0.26775.0>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 35604, disk size is 5361756 [ns_server:debug,2014-08-19T16:51:09.892,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:51:09.893,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:51:09.912,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:09.915,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.916,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2946 us [ns_server:debug,2014-08-19T16:51:09.917,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.917,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{903, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:09.928,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 639 state to replica [ns_server:info,2014-08-19T16:51:09.934,ns_1@10.242.238.90:<0.26778.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 639 to state replica [ns_server:debug,2014-08-19T16:51:09.967,ns_1@10.242.238.90:<0.26778.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_639_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:09.968,ns_1@10.242.238.90:<0.26778.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[639]}, {checkpoints,[{639,0}]}, {name,<<"replication_building_639_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[639]}, {takeover,false}, {suffix,"building_639_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",639,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} 
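The compaction_daemon entries above report the figures it decides from: data size 35604, disk size 5361756, and a database_fragmentation_threshold of 30%. A worked example with those numbers, assuming fragmentation is taken as the share of the on-disk file not occupied by live data (an illustration, not the daemon's exact code; the run above was a forced 'uninhibited' compaction in any case):

data_size = 35_604          # "`default` data size is 35604"
disk_size = 5_361_756       # "disk size is 5361756"
threshold_pct = 30          # {database_fragmentation_threshold,{30,undefined}}

# Assumption for illustration: fragmentation = unused fraction of the file.
fragmentation_pct = (disk_size - data_size) / disk_size * 100
print(f"fragmentation ~ {fragmentation_pct:.1f}% vs threshold {threshold_pct}%")
# ~99.3%, so the 30% threshold would be exceeded even without the forced compaction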
[rebalance:debug,2014-08-19T16:51:09.969,ns_1@10.242.238.90:<0.26778.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26779.0> [rebalance:debug,2014-08-19T16:51:09.970,ns_1@10.242.238.90:<0.26778.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:09.970,ns_1@10.242.238.90:<0.26778.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18401.1>,#Ref<16550.0.1.229799>}]} [rebalance:info,2014-08-19T16:51:09.970,ns_1@10.242.238.90:<0.26778.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 639 [rebalance:debug,2014-08-19T16:51:09.970,ns_1@10.242.238.90:<0.26778.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18401.1>,#Ref<16550.0.1.229799>}] [ns_server:debug,2014-08-19T16:51:09.971,ns_1@10.242.238.90:<0.26778.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:09.971,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26780.0> (ok) [rebalance:debug,2014-08-19T16:51:09.976,ns_1@10.242.238.90:<0.26781.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 639 [ns_server:debug,2014-08-19T16:51:10.013,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 639. Nacking mccouch update. [views:debug,2014-08-19T16:51:10.013,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/639. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:10.013,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",639,replica,0} [ns_server:debug,2014-08-19T16:51:10.014,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021, 944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,645, 398,1020,943,760,696,385,1007,994,747,683,981,734,670,423,968,721,657,410, 955,708,644,397,1019,942,759,695,384,1006,993,961,746,714,682,650,403,980, 948,765,733,701,669,422,390,1012,999,967,752,720,688,656,409,986,954,739,707, 675,643,396,1018,973,941,758,726,694,662,415,1005,992,960,745,713,681,649, 402,979,947,764,732,700,668,421,389,1011,998,966,751,719,687,655,408,985,953, 738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959,744,712, 680,648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407,952, 705,641,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653, 406,951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963, 716,652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664, 417,962,715,651,404,949,766,702,391,1013,753,689,1000,987,740,676,974,727, 663,416] [ns_server:info,2014-08-19T16:51:10.032,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 383 state to replica [ns_server:info,2014-08-19T16:51:10.037,ns_1@10.242.238.90:<0.26798.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 383 to state replica [views:debug,2014-08-19T16:51:10.047,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/639. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:10.047,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",639,replica,0} [ns_server:debug,2014-08-19T16:51:10.060,ns_1@10.242.238.90:<0.26798.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_383_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:10.061,ns_1@10.242.238.90:<0.26798.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[383]}, {checkpoints,[{383,0}]}, {name,<<"replication_building_383_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[383]}, {takeover,false}, {suffix,"building_383_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",383,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:10.062,ns_1@10.242.238.90:<0.26798.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26799.0> [rebalance:debug,2014-08-19T16:51:10.062,ns_1@10.242.238.90:<0.26798.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:10.063,ns_1@10.242.238.90:<0.26798.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18459.1>,#Ref<16550.0.1.230072>}]} [rebalance:info,2014-08-19T16:51:10.063,ns_1@10.242.238.90:<0.26798.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 383 [rebalance:debug,2014-08-19T16:51:10.063,ns_1@10.242.238.90:<0.26798.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18459.1>,#Ref<16550.0.1.230072>}] [ns_server:debug,2014-08-19T16:51:10.064,ns_1@10.242.238.90:<0.26798.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:10.078,ns_1@10.242.238.90:<0.26800.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 383 [views:debug,2014-08-19T16:51:10.080,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/639. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:10.080,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",639,pending,0} [ns_server:info,2014-08-19T16:51:10.132,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 638 state to replica [ns_server:info,2014-08-19T16:51:10.138,ns_1@10.242.238.90:<0.26817.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 638 to state replica [ns_server:debug,2014-08-19T16:51:10.164,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 383. Nacking mccouch update. [views:debug,2014-08-19T16:51:10.164,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/383. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:10.164,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",383,replica,0} [ns_server:debug,2014-08-19T16:51:10.164,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021, 944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,645, 398,1020,943,760,696,385,1007,994,747,683,981,734,670,423,968,721,657,410, 955,708,644,397,1019,942,759,695,384,1006,993,961,746,714,682,650,403,980, 948,765,733,701,669,422,390,1012,999,967,752,720,688,656,409,986,954,739,707, 675,643,396,1018,973,941,758,726,694,662,415,383,1005,992,960,745,713,681, 649,402,979,947,764,732,700,668,421,389,1011,998,966,751,719,687,655,408,985, 953,738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959,744, 712,680,648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407, 952,705,641,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717, 653,406,951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418, 963,716,652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728, 664,417,962,715,651,404,949,766,702,391,1013,753,689,1000,987,740,676,974, 727,663,416] [ns_server:debug,2014-08-19T16:51:10.169,ns_1@10.242.238.90:<0.26817.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_638_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:10.171,ns_1@10.242.238.90:<0.26817.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[638]}, {checkpoints,[{638,0}]}, {name,<<"replication_building_638_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[638]}, {takeover,false}, {suffix,"building_638_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",638,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:10.171,ns_1@10.242.238.90:<0.26817.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26818.0> [rebalance:debug,2014-08-19T16:51:10.172,ns_1@10.242.238.90:<0.26817.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:10.172,ns_1@10.242.238.90:<0.26817.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18500.1>,#Ref<16550.0.1.230297>}]} [rebalance:info,2014-08-19T16:51:10.172,ns_1@10.242.238.90:<0.26817.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 638 [rebalance:debug,2014-08-19T16:51:10.172,ns_1@10.242.238.90:<0.26817.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18500.1>,#Ref<16550.0.1.230297>}] [ns_server:debug,2014-08-19T16:51:10.173,ns_1@10.242.238.90:<0.26817.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:10.173,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26819.0> (ok) [rebalance:debug,2014-08-19T16:51:10.175,ns_1@10.242.238.90:<0.26820.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 638 
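Each vbucket move in the entries above follows the same sequence: ns_memcached flips the vbucket to replica, ebucketmigrator_srv kills any stale tap and starts a replication_building tap stream, and a janitor_agent worker then waits for persistence of checkpoint 1. Below is a minimal, hypothetical Python sketch (not part of Couchbase, assuming only the literal message texts visible in this capture) that pairs those two events per vbucket when the raw log is piped through it:

import re
import sys
from collections import defaultdict

# Message fragments exactly as they appear in this log capture.
STREAM_RE = re.compile(r"Initial stream for vbucket (\d+)")
WAIT_RE = re.compile(r"wait for persistence of checkpoint (\d+) in vbucket (\d+)")

def pair_events(text):
    events = defaultdict(dict)  # vbucket -> {"stream": True, "checkpoint": N}
    for m in STREAM_RE.finditer(text):
        events[int(m.group(1))]["stream"] = True
    for m in WAIT_RE.finditer(text):
        events[int(m.group(2))]["checkpoint"] = int(m.group(1))
    return events

if __name__ == "__main__":
    for vb, ev in sorted(pair_events(sys.stdin.read()).items()):
        print(f"vb {vb}: stream_started={ev.get('stream', False)} "
              f"persistence_wait_checkpoint={ev.get('checkpoint', '-')}")

Usage is just a pipe over the saved log (file name illustrative): python3 tap_pairs.py < ns_server.debug.log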
[ns_server:info,2014-08-19T16:51:10.181,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 382 state to replica [ns_server:info,2014-08-19T16:51:10.185,ns_1@10.242.238.90:<0.26823.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 382 to state replica [views:debug,2014-08-19T16:51:10.198,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/383. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:10.198,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",383,replica,0} [rebalance:debug,2014-08-19T16:51:10.199,ns_1@10.242.238.90:<0.26800.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:10.200,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26800.0> (ok) [ns_server:debug,2014-08-19T16:51:10.208,ns_1@10.242.238.90:<0.26823.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_382_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:10.209,ns_1@10.242.238.90:<0.26823.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[382]}, {checkpoints,[{382,0}]}, {name,<<"replication_building_382_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[382]}, {takeover,false}, {suffix,"building_382_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",382,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:10.210,ns_1@10.242.238.90:<0.26823.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26824.0> [rebalance:debug,2014-08-19T16:51:10.210,ns_1@10.242.238.90:<0.26823.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:10.210,ns_1@10.242.238.90:<0.26823.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18536.1>,#Ref<16550.0.1.230441>}]} [rebalance:info,2014-08-19T16:51:10.210,ns_1@10.242.238.90:<0.26823.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 382 [rebalance:debug,2014-08-19T16:51:10.211,ns_1@10.242.238.90:<0.26823.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18536.1>,#Ref<16550.0.1.230441>}] [ns_server:debug,2014-08-19T16:51:10.211,ns_1@10.242.238.90:<0.26823.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:10.224,ns_1@10.242.238.90:<0.26825.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 382 [ns_server:info,2014-08-19T16:51:10.277,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 637 state to replica [ns_server:info,2014-08-19T16:51:10.283,ns_1@10.242.238.90:<0.26831.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 637 to state replica [ns_server:debug,2014-08-19T16:51:10.314,ns_1@10.242.238.90:<0.26831.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_637_'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:51:10.315,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 638. Nacking mccouch update. 
[views:debug,2014-08-19T16:51:10.315,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/638. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:10.315,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",638,pending,0} [rebalance:info,2014-08-19T16:51:10.315,ns_1@10.242.238.90:<0.26831.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[637]}, {checkpoints,[{637,0}]}, {name,<<"replication_building_637_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[637]}, {takeover,false}, {suffix,"building_637_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",637,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [ns_server:debug,2014-08-19T16:51:10.316,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021, 944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,645, 398,1020,943,760,696,385,1007,994,747,683,981,734,670,423,968,721,657,410, 955,708,644,397,1019,942,759,695,384,1006,993,961,746,714,682,650,403,980, 948,765,733,701,669,422,390,1012,999,967,752,720,688,656,409,986,954,739,707, 675,643,396,1018,973,941,758,726,694,662,415,383,1005,992,960,745,713,681, 649,402,979,947,764,732,700,668,421,389,1011,998,966,751,719,687,655,408,985, 953,738,706,674,642,395,1017,972,940,757,725,693,661,414,1004,991,959,744, 712,680,648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407, 952,705,641,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717, 653,406,951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418, 963,716,652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728, 664,417,962,715,651,404,949,766,702,638,391,1013,753,689,1000,987,740,676, 974,727,663,416] [rebalance:debug,2014-08-19T16:51:10.316,ns_1@10.242.238.90:<0.26831.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26843.0> [rebalance:debug,2014-08-19T16:51:10.316,ns_1@10.242.238.90:<0.26831.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:10.317,ns_1@10.242.238.90:<0.26831.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18577.1>,#Ref<16550.0.1.230653>}]} [rebalance:info,2014-08-19T16:51:10.317,ns_1@10.242.238.90:<0.26831.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 637 [rebalance:debug,2014-08-19T16:51:10.317,ns_1@10.242.238.90:<0.26831.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18577.1>,#Ref<16550.0.1.230653>}] [ns_server:debug,2014-08-19T16:51:10.318,ns_1@10.242.238.90:<0.26831.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:10.318,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26844.0> (ok) [rebalance:debug,2014-08-19T16:51:10.320,ns_1@10.242.238.90:<0.26845.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 637 
[ns_server:info,2014-08-19T16:51:10.324,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 381 state to replica [ns_server:info,2014-08-19T16:51:10.329,ns_1@10.242.238.90:<0.26848.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 381 to state replica [ns_server:debug,2014-08-19T16:51:10.351,ns_1@10.242.238.90:<0.26848.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_381_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:10.353,ns_1@10.242.238.90:<0.26848.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[381]}, {checkpoints,[{381,0}]}, {name,<<"replication_building_381_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[381]}, {takeover,false}, {suffix,"building_381_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",381,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:10.353,ns_1@10.242.238.90:<0.26848.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26849.0> [rebalance:debug,2014-08-19T16:51:10.353,ns_1@10.242.238.90:<0.26848.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:10.354,ns_1@10.242.238.90:<0.26848.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18599.1>,#Ref<16550.0.1.230787>}]} [rebalance:info,2014-08-19T16:51:10.354,ns_1@10.242.238.90:<0.26848.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 381 [rebalance:debug,2014-08-19T16:51:10.354,ns_1@10.242.238.90:<0.26848.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18599.1>,#Ref<16550.0.1.230787>}] [ns_server:debug,2014-08-19T16:51:10.355,ns_1@10.242.238.90:<0.26848.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:10.370,ns_1@10.242.238.90:<0.26850.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 381 [views:debug,2014-08-19T16:51:10.374,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/638. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:10.374,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",638,pending,0} [ns_server:info,2014-08-19T16:51:10.423,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 636 state to replica [ns_server:info,2014-08-19T16:51:10.430,ns_1@10.242.238.90:<0.26853.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 636 to state replica [ns_server:debug,2014-08-19T16:51:10.460,ns_1@10.242.238.90:<0.26853.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_636_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:10.462,ns_1@10.242.238.90:<0.26853.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[636]}, {checkpoints,[{636,0}]}, {name,<<"replication_building_636_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[636]}, {takeover,false}, {suffix,"building_636_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",636,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:10.462,ns_1@10.242.238.90:<0.26853.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26868.0> [rebalance:debug,2014-08-19T16:51:10.462,ns_1@10.242.238.90:<0.26853.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:10.463,ns_1@10.242.238.90:<0.26853.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18654.1>,#Ref<16550.0.1.231024>}]} [rebalance:info,2014-08-19T16:51:10.463,ns_1@10.242.238.90:<0.26853.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 636 [rebalance:debug,2014-08-19T16:51:10.463,ns_1@10.242.238.90:<0.26853.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18654.1>,#Ref<16550.0.1.231024>}] [ns_server:debug,2014-08-19T16:51:10.464,ns_1@10.242.238.90:<0.26853.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:10.464,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26869.0> (ok) [rebalance:debug,2014-08-19T16:51:10.466,ns_1@10.242.238.90:<0.26870.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 636 [ns_server:info,2014-08-19T16:51:10.470,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 380 state to replica [ns_server:info,2014-08-19T16:51:10.475,ns_1@10.242.238.90:<0.26873.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 380 to state replica [ns_server:debug,2014-08-19T16:51:10.497,ns_1@10.242.238.90:<0.26873.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_380_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:10.499,ns_1@10.242.238.90:<0.26873.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[380]}, {checkpoints,[{380,0}]}, {name,<<"replication_building_380_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[380]}, {takeover,false}, {suffix,"building_380_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",380,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, 
{set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:10.499,ns_1@10.242.238.90:<0.26873.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26874.0> [rebalance:debug,2014-08-19T16:51:10.499,ns_1@10.242.238.90:<0.26873.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:10.500,ns_1@10.242.238.90:<0.26873.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18676.1>,#Ref<16550.0.1.231122>}]} [rebalance:info,2014-08-19T16:51:10.500,ns_1@10.242.238.90:<0.26873.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 380 [rebalance:debug,2014-08-19T16:51:10.500,ns_1@10.242.238.90:<0.26873.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18676.1>,#Ref<16550.0.1.231122>}] [ns_server:debug,2014-08-19T16:51:10.501,ns_1@10.242.238.90:<0.26873.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:10.510,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 382. Nacking mccouch update. [views:debug,2014-08-19T16:51:10.511,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/382. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:10.511,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",382,replica,0} [ns_server:debug,2014-08-19T16:51:10.511,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021, 944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,645, 398,1020,943,760,696,385,1007,994,747,683,981,734,670,423,968,721,657,410, 955,708,644,397,1019,942,759,695,384,1006,993,746,682,980,948,765,733,701, 669,422,390,1012,999,967,752,720,688,656,409,986,954,739,707,675,643,396, 1018,973,941,758,726,694,662,415,383,1005,992,960,745,713,681,649,402,979, 947,764,732,700,668,421,389,1011,998,966,751,719,687,655,408,985,953,738,706, 674,642,395,1017,972,940,757,725,693,661,414,382,1004,991,959,744,712,680, 648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407,952,705, 641,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664,417, 962,715,651,404,949,766,702,638,391,1013,753,689,1000,987,740,676,974,727, 663,416,961,714,650,403] [rebalance:debug,2014-08-19T16:51:10.515,ns_1@10.242.238.90:<0.26875.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 380 [views:debug,2014-08-19T16:51:10.561,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/382. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:10.561,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",382,replica,0} [ns_server:info,2014-08-19T16:51:10.569,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 635 state to replica [ns_server:info,2014-08-19T16:51:10.575,ns_1@10.242.238.90:<0.26878.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 635 to state replica [ns_server:debug,2014-08-19T16:51:10.607,ns_1@10.242.238.90:<0.26878.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_635_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:10.608,ns_1@10.242.238.90:<0.26878.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[635]}, {checkpoints,[{635,0}]}, {name,<<"replication_building_635_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[635]}, {takeover,false}, {suffix,"building_635_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",635,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:10.609,ns_1@10.242.238.90:<0.26878.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26893.0> [rebalance:debug,2014-08-19T16:51:10.609,ns_1@10.242.238.90:<0.26878.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:10.610,ns_1@10.242.238.90:<0.26878.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18717.1>,#Ref<16550.0.1.231334>}]} [rebalance:info,2014-08-19T16:51:10.610,ns_1@10.242.238.90:<0.26878.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 635 [rebalance:debug,2014-08-19T16:51:10.610,ns_1@10.242.238.90:<0.26878.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18717.1>,#Ref<16550.0.1.231334>}] [ns_server:debug,2014-08-19T16:51:10.611,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26894.0> (ok) [ns_server:debug,2014-08-19T16:51:10.611,ns_1@10.242.238.90:<0.26878.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:10.612,ns_1@10.242.238.90:<0.26895.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 635 [ns_server:info,2014-08-19T16:51:10.617,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 379 state to replica [ns_server:info,2014-08-19T16:51:10.621,ns_1@10.242.238.90:<0.26898.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 379 to state replica [ns_server:debug,2014-08-19T16:51:10.628,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 637. Nacking mccouch update. [views:debug,2014-08-19T16:51:10.628,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/637. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:10.628,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",637,pending,0} [ns_server:debug,2014-08-19T16:51:10.629,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021, 944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,645, 398,1020,943,760,696,385,1007,994,747,683,981,734,670,423,968,721,657,410, 955,708,644,397,1019,942,759,695,384,1006,993,746,682,980,948,765,733,701, 669,637,422,390,1012,999,967,752,720,688,656,409,986,954,739,707,675,643,396, 1018,973,941,758,726,694,662,415,383,1005,992,960,745,713,681,649,402,979, 947,764,732,700,668,421,389,1011,998,966,751,719,687,655,408,985,953,738,706, 674,642,395,1017,972,940,757,725,693,661,414,382,1004,991,959,744,712,680, 648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407,952,705, 641,394,1016,939,756,692,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963,716, 652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664,417, 962,715,651,404,949,766,702,638,391,1013,753,689,1000,987,740,676,974,727, 663,416,961,714,650,403] [ns_server:debug,2014-08-19T16:51:10.644,ns_1@10.242.238.90:<0.26898.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_379_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:10.646,ns_1@10.242.238.90:<0.26898.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[379]}, {checkpoints,[{379,0}]}, {name,<<"replication_building_379_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[379]}, {takeover,false}, {suffix,"building_379_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",379,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:10.646,ns_1@10.242.238.90:<0.26898.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26899.0> [rebalance:debug,2014-08-19T16:51:10.646,ns_1@10.242.238.90:<0.26898.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:10.647,ns_1@10.242.238.90:<0.26898.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18739.1>,#Ref<16550.0.1.231453>}]} [rebalance:info,2014-08-19T16:51:10.647,ns_1@10.242.238.90:<0.26898.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 379 [rebalance:debug,2014-08-19T16:51:10.647,ns_1@10.242.238.90:<0.26898.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18739.1>,#Ref<16550.0.1.231453>}] [ns_server:debug,2014-08-19T16:51:10.648,ns_1@10.242.238.90:<0.26898.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:10.660,ns_1@10.242.238.90:<0.26900.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 379 [views:debug,2014-08-19T16:51:10.662,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/637. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:10.662,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",637,pending,0} [ns_server:info,2014-08-19T16:51:10.714,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 634 state to replica [ns_server:info,2014-08-19T16:51:10.720,ns_1@10.242.238.90:<0.26917.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 634 to state replica [ns_server:debug,2014-08-19T16:51:10.729,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 381. Nacking mccouch update. [views:debug,2014-08-19T16:51:10.729,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/381. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:10.729,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",381,replica,0} [ns_server:debug,2014-08-19T16:51:10.730,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021, 944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,645, 398,1020,943,760,696,385,1007,994,747,683,981,734,670,423,968,721,657,410, 955,708,644,397,1019,942,759,695,384,1006,993,746,682,980,948,765,733,701, 669,637,422,390,1012,999,967,752,720,688,656,409,986,954,739,707,675,643,396, 1018,973,941,758,726,694,662,415,383,1005,992,960,745,713,681,649,402,979, 947,764,732,700,668,421,389,1011,998,966,751,719,687,655,408,985,953,738,706, 674,642,395,1017,972,940,757,725,693,661,414,382,1004,991,959,744,712,680, 648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407,952,705, 641,394,1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653, 406,951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418,963, 716,652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664, 417,962,715,651,404,949,766,702,638,391,1013,753,689,1000,987,740,676,974, 727,663,416,961,714,650,403] [ns_server:debug,2014-08-19T16:51:10.751,ns_1@10.242.238.90:<0.26917.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_634_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:10.753,ns_1@10.242.238.90:<0.26917.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[634]}, {checkpoints,[{634,0}]}, {name,<<"replication_building_634_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[634]}, {takeover,false}, {suffix,"building_634_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",634,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:10.753,ns_1@10.242.238.90:<0.26917.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26918.0> [rebalance:debug,2014-08-19T16:51:10.754,ns_1@10.242.238.90:<0.26917.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:10.754,ns_1@10.242.238.90:<0.26917.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18794.1>,#Ref<16550.0.1.231690>}]} 
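capi_set_view_manager re-logs the complete "Usable vbuckets" set after each change, so the readable signal in these entries is the delta between consecutive dumps rather than the dumps themselves. A hypothetical helper, again assuming only the message format printed above, that extracts each dump and prints what was added or removed:

import re
import sys

# Collect every "Usable vbuckets: [...]" dump and diff consecutive snapshots.
SNAPSHOT_RE = re.compile(r"Usable vbuckets:\s*\[([0-9,\s]+)\]")

def snapshots(text):
    for m in SNAPSHOT_RE.finditer(text):
        yield {int(n) for n in re.findall(r"\d+", m.group(1))}

if __name__ == "__main__":
    prev = None
    for i, snap in enumerate(snapshots(sys.stdin.read())):
        if prev is not None:
            added = sorted(snap - prev)
            removed = sorted(prev - snap)
            print(f"dump {i}: {len(snap)} usable, added={added}, removed={removed}")
        prev = snap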
[rebalance:info,2014-08-19T16:51:10.754,ns_1@10.242.238.90:<0.26917.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 634 [rebalance:debug,2014-08-19T16:51:10.755,ns_1@10.242.238.90:<0.26917.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18794.1>,#Ref<16550.0.1.231690>}] [ns_server:debug,2014-08-19T16:51:10.755,ns_1@10.242.238.90:<0.26917.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:10.755,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26919.0> (ok) [rebalance:debug,2014-08-19T16:51:10.757,ns_1@10.242.238.90:<0.26920.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 634 [ns_server:info,2014-08-19T16:51:10.761,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 378 state to replica [views:debug,2014-08-19T16:51:10.763,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/381. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:10.763,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",381,replica,0} [rebalance:debug,2014-08-19T16:51:10.763,ns_1@10.242.238.90:<0.26781.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:10.763,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26781.0> (ok) [ns_server:info,2014-08-19T16:51:10.765,ns_1@10.242.238.90:<0.26923.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 378 to state replica [ns_server:debug,2014-08-19T16:51:10.788,ns_1@10.242.238.90:<0.26923.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_378_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:10.790,ns_1@10.242.238.90:<0.26923.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[378]}, {checkpoints,[{378,0}]}, {name,<<"replication_building_378_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[378]}, {takeover,false}, {suffix,"building_378_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",378,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:10.790,ns_1@10.242.238.90:<0.26923.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26924.0> [rebalance:debug,2014-08-19T16:51:10.791,ns_1@10.242.238.90:<0.26923.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:10.791,ns_1@10.242.238.90:<0.26923.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18816.1>,#Ref<16550.0.1.231788>}]} [rebalance:info,2014-08-19T16:51:10.791,ns_1@10.242.238.90:<0.26923.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 378 [rebalance:debug,2014-08-19T16:51:10.792,ns_1@10.242.238.90:<0.26923.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18816.1>,#Ref<16550.0.1.231788>}] [ns_server:debug,2014-08-19T16:51:10.793,ns_1@10.242.238.90:<0.26923.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[rebalance:debug,2014-08-19T16:51:10.814,ns_1@10.242.238.90:<0.26925.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 378 [ns_server:info,2014-08-19T16:51:10.869,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 633 state to replica [ns_server:info,2014-08-19T16:51:10.875,ns_1@10.242.238.90:<0.26942.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 633 to state replica [ns_server:debug,2014-08-19T16:51:10.907,ns_1@10.242.238.90:<0.26942.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_633_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:10.908,ns_1@10.242.238.90:<0.26942.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[633]}, {checkpoints,[{633,0}]}, {name,<<"replication_building_633_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[633]}, {takeover,false}, {suffix,"building_633_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",633,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:10.909,ns_1@10.242.238.90:<0.26942.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26943.0> [rebalance:debug,2014-08-19T16:51:10.909,ns_1@10.242.238.90:<0.26942.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:10.909,ns_1@10.242.238.90:<0.26942.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18857.1>,#Ref<16550.0.1.232020>}]} [rebalance:info,2014-08-19T16:51:10.909,ns_1@10.242.238.90:<0.26942.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 633 [rebalance:debug,2014-08-19T16:51:10.910,ns_1@10.242.238.90:<0.26942.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18857.1>,#Ref<16550.0.1.232020>}] [ns_server:debug,2014-08-19T16:51:10.910,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26944.0> (ok) [ns_server:debug,2014-08-19T16:51:10.911,ns_1@10.242.238.90:<0.26942.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:10.912,ns_1@10.242.238.90:<0.26945.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 633 [ns_server:debug,2014-08-19T16:51:10.913,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 636. Nacking mccouch update. [views:debug,2014-08-19T16:51:10.913,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/636. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:10.913,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",636,pending,0} [ns_server:debug,2014-08-19T16:51:10.914,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399,1021, 944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956,709,645, 398,1020,943,760,696,385,1007,994,747,683,981,734,670,423,968,721,657,410, 955,708,644,397,1019,942,759,695,384,1006,993,746,682,980,948,765,733,701, 669,637,422,390,1012,999,967,752,720,688,656,409,986,954,739,707,675,643,396, 1018,973,941,758,726,694,662,415,383,1005,992,960,745,713,681,649,402,979, 947,764,732,700,668,636,421,389,1011,998,966,751,719,687,655,408,985,953,738, 706,674,642,395,1017,972,940,757,725,693,661,414,382,1004,991,959,744,712, 680,648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407,952, 705,641,394,1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717, 653,406,951,704,640,393,1015,938,755,691,1002,989,742,678,976,729,665,418, 963,716,652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728, 664,417,962,715,651,404,949,766,702,638,391,1013,753,689,1000,987,740,676, 974,727,663,416,961,714,650,403] [ns_server:info,2014-08-19T16:51:10.916,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 377 state to replica [ns_server:info,2014-08-19T16:51:10.920,ns_1@10.242.238.90:<0.26948.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 377 to state replica [ns_server:debug,2014-08-19T16:51:10.943,ns_1@10.242.238.90:<0.26948.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_377_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:10.944,ns_1@10.242.238.90:<0.26948.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[377]}, {checkpoints,[{377,0}]}, {name,<<"replication_building_377_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[377]}, {takeover,false}, {suffix,"building_377_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",377,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:10.945,ns_1@10.242.238.90:<0.26948.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26949.0> [rebalance:debug,2014-08-19T16:51:10.945,ns_1@10.242.238.90:<0.26948.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:10.945,ns_1@10.242.238.90:<0.26948.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18893.1>,#Ref<16550.0.1.232164>}]} [rebalance:info,2014-08-19T16:51:10.946,ns_1@10.242.238.90:<0.26948.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 377 [rebalance:debug,2014-08-19T16:51:10.946,ns_1@10.242.238.90:<0.26948.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18893.1>,#Ref<16550.0.1.232164>}] [ns_server:debug,2014-08-19T16:51:10.947,ns_1@10.242.238.90:<0.26948.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:10.959,ns_1@10.242.238.90:<0.26950.0>:janitor_agent:handle_call:793]Going 
to wait for persistence of checkpoint 1 in vbucket 377 [views:debug,2014-08-19T16:51:10.972,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/636. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:10.972,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",636,pending,0} [ns_server:info,2014-08-19T16:51:11.012,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 632 state to replica [ns_server:info,2014-08-19T16:51:11.018,ns_1@10.242.238.90:<0.26953.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 632 to state replica [ns_server:debug,2014-08-19T16:51:11.050,ns_1@10.242.238.90:<0.26953.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_632_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.051,ns_1@10.242.238.90:<0.26953.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[632]}, {checkpoints,[{632,0}]}, {name,<<"replication_building_632_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[632]}, {takeover,false}, {suffix,"building_632_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",632,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:11.052,ns_1@10.242.238.90:<0.26953.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26968.0> [rebalance:debug,2014-08-19T16:51:11.052,ns_1@10.242.238.90:<0.26953.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:11.052,ns_1@10.242.238.90:<0.26953.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18934.1>,#Ref<16550.0.1.232377>}]} [rebalance:info,2014-08-19T16:51:11.053,ns_1@10.242.238.90:<0.26953.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 632 [rebalance:debug,2014-08-19T16:51:11.053,ns_1@10.242.238.90:<0.26953.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18934.1>,#Ref<16550.0.1.232377>}] [ns_server:debug,2014-08-19T16:51:11.053,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26969.0> (ok) [ns_server:debug,2014-08-19T16:51:11.054,ns_1@10.242.238.90:<0.26953.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:11.055,ns_1@10.242.238.90:<0.26970.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 632 [ns_server:info,2014-08-19T16:51:11.059,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 376 state to replica [ns_server:info,2014-08-19T16:51:11.063,ns_1@10.242.238.90:<0.26973.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 376 to state replica [ns_server:debug,2014-08-19T16:51:11.086,ns_1@10.242.238.90:<0.26973.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_376_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.087,ns_1@10.242.238.90:<0.26973.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[376]}, {checkpoints,[{376,0}]}, {name,<<"replication_building_376_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[376]}, 
{takeover,false}, {suffix,"building_376_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",376,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:11.088,ns_1@10.242.238.90:<0.26973.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26974.0> [rebalance:debug,2014-08-19T16:51:11.088,ns_1@10.242.238.90:<0.26973.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:11.088,ns_1@10.242.238.90:<0.26973.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.18956.1>,#Ref<16550.0.1.232495>}]} [rebalance:info,2014-08-19T16:51:11.088,ns_1@10.242.238.90:<0.26973.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 376 [rebalance:debug,2014-08-19T16:51:11.089,ns_1@10.242.238.90:<0.26973.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.18956.1>,#Ref<16550.0.1.232495>}] [ns_server:debug,2014-08-19T16:51:11.089,ns_1@10.242.238.90:<0.26973.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:11.099,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 634. Nacking mccouch update. [views:debug,2014-08-19T16:51:11.099,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/634. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:11.100,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",634,pending,0} [ns_server:debug,2014-08-19T16:51:11.100,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956, 709,645,398,1020,943,760,696,385,1007,994,747,683,981,734,670,423,968,721, 657,410,955,708,644,397,1019,942,759,695,384,1006,993,746,682,980,948,765, 733,701,669,637,422,390,1012,999,967,752,720,688,656,409,986,954,739,707,675, 643,396,1018,973,941,758,726,694,662,415,383,1005,992,960,745,713,681,649, 402,979,947,764,732,700,668,636,421,389,1011,998,966,751,719,687,655,408,985, 953,738,706,674,642,395,1017,972,940,757,725,693,661,414,382,1004,991,959, 744,712,680,648,401,1023,978,946,763,731,699,667,420,388,1010,965,718,654, 407,952,705,641,394,1016,939,756,692,381,1003,990,743,679,977,730,666,419, 964,717,653,406,951,704,640,393,1015,938,755,691,1002,989,742,678,976,729, 665,418,963,716,652,405,950,767,703,639,392,1014,754,690,1001,988,741,677, 975,728,664,417,962,715,651,404,949,766,702,638,391,1013,753,689,1000,987, 740,676,974,727,663,416,961,714,650,403] [rebalance:debug,2014-08-19T16:51:11.102,ns_1@10.242.238.90:<0.26975.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 376 [ns_server:info,2014-08-19T16:51:11.155,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 631 state to replica [views:debug,2014-08-19T16:51:11.159,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/634. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:11.159,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",634,pending,0} [ns_server:info,2014-08-19T16:51:11.162,ns_1@10.242.238.90:<0.26978.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 631 to state replica [ns_server:debug,2014-08-19T16:51:11.193,ns_1@10.242.238.90:<0.26978.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_631_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.194,ns_1@10.242.238.90:<0.26978.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[631]}, {checkpoints,[{631,0}]}, {name,<<"replication_building_631_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[631]}, {takeover,false}, {suffix,"building_631_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",631,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:11.195,ns_1@10.242.238.90:<0.26978.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26985.0> [rebalance:debug,2014-08-19T16:51:11.195,ns_1@10.242.238.90:<0.26978.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:11.196,ns_1@10.242.238.90:<0.26978.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19016.1>,#Ref<16550.0.1.232752>}]} [rebalance:info,2014-08-19T16:51:11.196,ns_1@10.242.238.90:<0.26978.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 631 [rebalance:debug,2014-08-19T16:51:11.196,ns_1@10.242.238.90:<0.26978.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19016.1>,#Ref<16550.0.1.232752>}] [ns_server:debug,2014-08-19T16:51:11.197,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26993.0> (ok) [ns_server:debug,2014-08-19T16:51:11.197,ns_1@10.242.238.90:<0.26978.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:11.198,ns_1@10.242.238.90:<0.26995.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 631 [ns_server:info,2014-08-19T16:51:11.203,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 375 state to replica [ns_server:info,2014-08-19T16:51:11.206,ns_1@10.242.238.90:<0.26998.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 375 to state replica [ns_server:debug,2014-08-19T16:51:11.229,ns_1@10.242.238.90:<0.26998.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_375_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.231,ns_1@10.242.238.90:<0.26998.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[375]}, {checkpoints,[{375,0}]}, {name,<<"replication_building_375_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[375]}, {takeover,false}, {suffix,"building_375_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",375,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:11.232,ns_1@10.242.238.90:<0.26998.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.26999.0> 
[rebalance:debug,2014-08-19T16:51:11.232,ns_1@10.242.238.90:<0.26998.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:11.232,ns_1@10.242.238.90:<0.26998.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19038.1>,#Ref<16550.0.1.232868>}]} [rebalance:info,2014-08-19T16:51:11.232,ns_1@10.242.238.90:<0.26998.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 375 [rebalance:debug,2014-08-19T16:51:11.233,ns_1@10.242.238.90:<0.26998.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19038.1>,#Ref<16550.0.1.232868>}] [ns_server:debug,2014-08-19T16:51:11.234,ns_1@10.242.238.90:<0.26998.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:11.247,ns_1@10.242.238.90:<0.27000.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 375 [ns_server:debug,2014-08-19T16:51:11.251,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 380. Nacking mccouch update. [views:debug,2014-08-19T16:51:11.251,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/380. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:11.251,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",380,replica,0} [ns_server:debug,2014-08-19T16:51:11.252,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956, 709,645,398,1020,943,760,696,385,1007,994,747,683,981,734,670,423,968,721, 657,410,955,708,644,397,1019,942,759,695,384,1006,993,746,682,980,733,669, 422,999,967,752,720,688,656,409,986,954,739,707,675,643,396,1018,973,941,758, 726,694,662,415,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668, 636,421,389,1011,998,966,751,719,687,655,408,985,953,738,706,674,642,395, 1017,972,940,757,725,693,661,414,382,1004,991,959,744,712,680,648,401,1023, 978,946,763,731,699,667,420,388,1010,965,718,654,407,952,705,641,394,1016, 939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406,951,704,640, 393,1015,938,755,691,380,1002,989,742,678,976,729,665,418,963,716,652,405, 950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664,417,962,715, 651,404,949,766,702,638,391,1013,753,689,1000,987,740,676,974,727,663,416, 961,714,650,403,948,765,701,637,390,1012] [ns_server:info,2014-08-19T16:51:11.298,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 630 state to replica [ns_server:info,2014-08-19T16:51:11.305,ns_1@10.242.238.90:<0.27003.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 630 to state replica [views:debug,2014-08-19T16:51:11.310,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/380. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:11.310,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",380,replica,0} [ns_server:debug,2014-08-19T16:51:11.336,ns_1@10.242.238.90:<0.27003.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_630_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.338,ns_1@10.242.238.90:<0.27003.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[630]}, {checkpoints,[{630,0}]}, {name,<<"replication_building_630_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[630]}, {takeover,false}, {suffix,"building_630_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",630,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:11.338,ns_1@10.242.238.90:<0.27003.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27004.0> [rebalance:debug,2014-08-19T16:51:11.338,ns_1@10.242.238.90:<0.27003.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:11.339,ns_1@10.242.238.90:<0.27003.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19101.1>,#Ref<16550.0.1.233169>}]} [rebalance:info,2014-08-19T16:51:11.339,ns_1@10.242.238.90:<0.27003.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 630 [rebalance:debug,2014-08-19T16:51:11.339,ns_1@10.242.238.90:<0.27003.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19101.1>,#Ref<16550.0.1.233169>}] [ns_server:debug,2014-08-19T16:51:11.340,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27005.0> (ok) [ns_server:debug,2014-08-19T16:51:11.340,ns_1@10.242.238.90:<0.27003.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:11.342,ns_1@10.242.238.90:<0.27006.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 630 [ns_server:info,2014-08-19T16:51:11.346,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 374 state to replica [ns_server:info,2014-08-19T16:51:11.350,ns_1@10.242.238.90:<0.27023.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 374 to state replica [ns_server:debug,2014-08-19T16:51:11.373,ns_1@10.242.238.90:<0.27023.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_374_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.374,ns_1@10.242.238.90:<0.27023.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[374]}, {checkpoints,[{374,0}]}, {name,<<"replication_building_374_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[374]}, {takeover,false}, {suffix,"building_374_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",374,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:11.375,ns_1@10.242.238.90:<0.27023.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27030.0> [rebalance:debug,2014-08-19T16:51:11.375,ns_1@10.242.238.90:<0.27023.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
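The other recurring record in this stretch is the mccouch notification pair: mc_connection adds the _local/vbuuid document, nacks the update, and signals {set_vbucket,"default",VBucket,State,0}, which the view manager then reports as a set_vbucket event; most state changes appear more than once in this capture. A hypothetical tally over those terms (assuming only the tuple format shown above, with the "default" bucket name taken from the log itself):

import re
import sys
from collections import Counter

# Count how often each (vbucket, state) pair was signaled to mccouch in this capture.
SIGNAL_RE = re.compile(r'\{set_vbucket,"default",(\d+),(\w+),\d+\}')

if __name__ == "__main__":
    counts = Counter(
        (int(vb), state) for vb, state in SIGNAL_RE.findall(sys.stdin.read())
    )
    for (vb, state), n in sorted(counts.items()):
        print(f"vb {vb}: {state} signaled {n} time(s)")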
[rebalance:debug,2014-08-19T16:51:11.375,ns_1@10.242.238.90:<0.27023.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19125.1>,#Ref<16550.0.1.233289>}]} [rebalance:info,2014-08-19T16:51:11.375,ns_1@10.242.238.90:<0.27023.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 374 [rebalance:debug,2014-08-19T16:51:11.376,ns_1@10.242.238.90:<0.27023.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19125.1>,#Ref<16550.0.1.233289>}] [ns_server:debug,2014-08-19T16:51:11.377,ns_1@10.242.238.90:<0.27023.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:11.377,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 378. Nacking mccouch update. [views:debug,2014-08-19T16:51:11.377,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/378. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:11.377,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",378,replica,0} [ns_server:debug,2014-08-19T16:51:11.378,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956, 709,645,398,1020,943,760,696,385,1007,994,747,683,981,734,670,423,968,721, 657,410,955,708,644,397,1019,942,759,695,384,1006,993,746,682,980,733,669, 422,999,967,752,720,688,656,409,986,954,739,707,675,643,396,1018,973,941,758, 726,694,662,415,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668, 636,421,389,1011,998,966,751,719,687,655,408,985,953,738,706,674,642,395, 1017,972,940,757,725,693,661,414,382,1004,991,959,744,712,680,648,401,1023, 978,946,763,731,699,667,420,388,1010,965,718,654,407,952,705,641,394,1016, 939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406,951,704,640, 393,1015,938,755,691,380,1002,989,742,678,976,729,665,418,963,716,652,405, 950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664,417,962,715, 651,404,949,766,702,638,391,1013,753,689,378,1000,987,740,676,974,727,663, 416,961,714,650,403,948,765,701,637,390,1012] [rebalance:debug,2014-08-19T16:51:11.391,ns_1@10.242.238.90:<0.27031.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 374 [views:debug,2014-08-19T16:51:11.427,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/378. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:11.427,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",378,replica,0} [ns_server:info,2014-08-19T16:51:11.444,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 629 state to replica [ns_server:info,2014-08-19T16:51:11.451,ns_1@10.242.238.90:<0.27034.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 629 to state replica [ns_server:debug,2014-08-19T16:51:11.483,ns_1@10.242.238.90:<0.27034.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_629_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.485,ns_1@10.242.238.90:<0.27034.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[629]}, {checkpoints,[{629,0}]}, {name,<<"replication_building_629_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[629]}, {takeover,false}, {suffix,"building_629_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",629,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:11.485,ns_1@10.242.238.90:<0.27034.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27049.0> [rebalance:debug,2014-08-19T16:51:11.485,ns_1@10.242.238.90:<0.27034.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:11.486,ns_1@10.242.238.90:<0.27034.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19181.1>,#Ref<16550.0.1.233599>}]} [rebalance:info,2014-08-19T16:51:11.486,ns_1@10.242.238.90:<0.27034.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 629 [rebalance:debug,2014-08-19T16:51:11.486,ns_1@10.242.238.90:<0.27034.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19181.1>,#Ref<16550.0.1.233599>}] [ns_server:debug,2014-08-19T16:51:11.487,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27050.0> (ok) [ns_server:debug,2014-08-19T16:51:11.487,ns_1@10.242.238.90:<0.27034.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:11.489,ns_1@10.242.238.90:<0.27051.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 629 [ns_server:info,2014-08-19T16:51:11.493,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 373 state to replica [ns_server:debug,2014-08-19T16:51:11.494,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 632. Nacking mccouch update. [views:debug,2014-08-19T16:51:11.494,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/632. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:11.494,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",632,pending,0} [ns_server:debug,2014-08-19T16:51:11.495,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956, 709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734,670,423,968, 721,657,410,955,708,644,397,1019,942,759,695,384,1006,993,746,682,980,733, 669,422,999,967,752,720,688,656,409,986,954,739,707,675,643,396,1018,973,941, 758,726,694,662,415,383,1005,992,960,745,713,681,649,402,979,947,764,732,700, 668,636,421,389,1011,998,966,751,719,687,655,408,985,953,738,706,674,642,395, 1017,972,940,757,725,693,661,414,382,1004,991,959,744,712,680,648,401,1023, 978,946,763,731,699,667,420,388,1010,965,718,654,407,952,705,641,394,1016, 939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406,951,704,640, 393,1015,938,755,691,380,1002,989,742,678,976,729,665,418,963,716,652,405, 950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664,417,962,715, 651,404,949,766,702,638,391,1013,753,689,378,1000,987,740,676,974,727,663, 416,961,714,650,403,948,765,701,637,390,1012] [ns_server:info,2014-08-19T16:51:11.497,ns_1@10.242.238.90:<0.27054.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 373 to state replica [ns_server:debug,2014-08-19T16:51:11.521,ns_1@10.242.238.90:<0.27054.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_373_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.522,ns_1@10.242.238.90:<0.27054.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[373]}, {checkpoints,[{373,0}]}, {name,<<"replication_building_373_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[373]}, {takeover,false}, {suffix,"building_373_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",373,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:11.523,ns_1@10.242.238.90:<0.27054.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27055.0> [rebalance:debug,2014-08-19T16:51:11.523,ns_1@10.242.238.90:<0.27054.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:11.524,ns_1@10.242.238.90:<0.27054.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19203.1>,#Ref<16550.0.1.233716>}]} [rebalance:info,2014-08-19T16:51:11.524,ns_1@10.242.238.90:<0.27054.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 373 [rebalance:debug,2014-08-19T16:51:11.524,ns_1@10.242.238.90:<0.27054.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19203.1>,#Ref<16550.0.1.233716>}] [ns_server:debug,2014-08-19T16:51:11.525,ns_1@10.242.238.90:<0.27054.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:51:11.528,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/632. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:11.529,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",632,pending,0} [rebalance:debug,2014-08-19T16:51:11.539,ns_1@10.242.238.90:<0.27056.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 373 [ns_server:info,2014-08-19T16:51:11.597,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 628 state to replica [ns_server:info,2014-08-19T16:51:11.604,ns_1@10.242.238.90:<0.27073.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 628 to state replica [ns_server:debug,2014-08-19T16:51:11.605,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 630. Nacking mccouch update. [views:debug,2014-08-19T16:51:11.605,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/630. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:11.606,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",630,pending,0} [ns_server:debug,2014-08-19T16:51:11.606,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956, 709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734,670,423,968, 721,657,410,955,708,644,397,1019,942,759,695,384,1006,993,746,682,980,733, 669,422,999,967,752,720,688,656,409,986,954,739,707,675,643,396,1018,973,941, 758,726,694,662,630,415,383,1005,992,960,745,713,681,649,402,979,947,764,732, 700,668,636,421,389,1011,998,966,751,719,687,655,408,985,953,738,706,674,642, 395,1017,972,940,757,725,693,661,414,382,1004,991,959,744,712,680,648,401, 1023,978,946,763,731,699,667,420,388,1010,965,718,654,407,952,705,641,394, 1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406,951, 704,640,393,1015,938,755,691,380,1002,989,742,678,976,729,665,418,963,716, 652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664,417, 962,715,651,404,949,766,702,638,391,1013,753,689,378,1000,987,740,676,974, 727,663,416,961,714,650,403,948,765,701,637,390,1012] [ns_server:debug,2014-08-19T16:51:11.636,ns_1@10.242.238.90:<0.27073.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_628_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.638,ns_1@10.242.238.90:<0.27073.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[628]}, {checkpoints,[{628,0}]}, {name,<<"replication_building_628_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[628]}, {takeover,false}, {suffix,"building_628_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",628,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:11.639,ns_1@10.242.238.90:<0.27073.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27074.0> [rebalance:debug,2014-08-19T16:51:11.639,ns_1@10.242.238.90:<0.27073.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
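In the streams started above, vbuckets 628, 629 and 630 are created with {set_to_pending_state,true} while 373 and 374 use {set_to_pending_state,false}, which lines up with the pending versus replica states the view manager later reports for those vbuckets. A small sketch (regex and names are mine, applied to the raw log text) for pulling that flag out per vbucket:

    import re

    # Grab the vbucket id and the set_to_pending_state flag from each
    # "Starting tap stream" entry; the option names come from the log itself.
    STREAM = re.compile(
        r"Starting tap stream:.*?\{vbuckets,\[(\d+)\]\}.*?"
        r"\{set_to_pending_state,(true|false)\}",
        re.S,
    )

    def pending_flags(log_text):
        """Return {vbucket: True/False} for the set_to_pending_state option."""
        return {int(vb): flag == "true" for vb, flag in STREAM.findall(log_text)}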
[rebalance:debug,2014-08-19T16:51:11.639,ns_1@10.242.238.90:<0.27073.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19259.1>,#Ref<16550.0.1.233989>}]} [rebalance:info,2014-08-19T16:51:11.640,ns_1@10.242.238.90:<0.27073.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 628 [rebalance:debug,2014-08-19T16:51:11.640,ns_1@10.242.238.90:<0.27073.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19259.1>,#Ref<16550.0.1.233989>}] [ns_server:debug,2014-08-19T16:51:11.641,ns_1@10.242.238.90:<0.27073.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:11.641,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27075.0> (ok) [rebalance:debug,2014-08-19T16:51:11.643,ns_1@10.242.238.90:<0.27076.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 628 [ns_server:info,2014-08-19T16:51:11.647,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 372 state to replica [ns_server:info,2014-08-19T16:51:11.651,ns_1@10.242.238.90:<0.27079.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 372 to state replica [views:debug,2014-08-19T16:51:11.664,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/630. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:11.664,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",630,pending,0} [ns_server:debug,2014-08-19T16:51:11.675,ns_1@10.242.238.90:<0.27079.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_372_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.676,ns_1@10.242.238.90:<0.27079.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[372]}, {checkpoints,[{372,0}]}, {name,<<"replication_building_372_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[372]}, {takeover,false}, {suffix,"building_372_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",372,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:11.677,ns_1@10.242.238.90:<0.27079.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27080.0> [rebalance:debug,2014-08-19T16:51:11.677,ns_1@10.242.238.90:<0.27079.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:11.677,ns_1@10.242.238.90:<0.27079.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19281.1>,#Ref<16550.0.1.234108>}]} [rebalance:info,2014-08-19T16:51:11.677,ns_1@10.242.238.90:<0.27079.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 372 [rebalance:debug,2014-08-19T16:51:11.678,ns_1@10.242.238.90:<0.27079.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19281.1>,#Ref<16550.0.1.234108>}] [ns_server:debug,2014-08-19T16:51:11.679,ns_1@10.242.238.90:<0.27079.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:11.692,ns_1@10.242.238.90:<0.27081.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 372 
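Each replica build above follows the same sequence: ns_memcached changes the vbucket to replica, the migrator sets it again on {"10.242.238.90",11209}, kills any existing replication_building_<vb> tap, starts the stream, logs "Initial stream for vbucket N" and the backfill-close message, and a janitor_agent worker then waits for persistence of checkpoint 1 in that vbucket. A sketch (mine, assuming the capture has been read into one string) that flags vbuckets whose stream started but which never reached the persistence-wait step within the captured window:

    import re

    # First and last step of the per-vbucket sequence, as worded in this log.
    STARTED = re.compile(r"Initial stream for vbucket (\d+)")
    WAITING = re.compile(r"wait for persistence of checkpoint \d+ in vbucket (\d+)")

    def stalled_builds(log_text):
        """Return vbuckets that started a stream but show no persistence wait yet."""
        started = {int(v) for v in STARTED.findall(log_text)}
        waiting = {int(v) for v in WAITING.findall(log_text)}
        return sorted(started - waiting)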
[ns_server:info,2014-08-19T16:51:11.745,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 627 state to replica [ns_server:debug,2014-08-19T16:51:11.748,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 376. Nacking mccouch update. [views:debug,2014-08-19T16:51:11.748,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/376. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:11.748,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",376,replica,0} [ns_server:debug,2014-08-19T16:51:11.748,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956, 709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734,670,423,968, 721,657,410,955,708,644,397,1019,942,759,695,384,1006,993,746,682,980,733, 669,422,999,967,752,720,688,656,409,986,954,739,707,675,643,396,1018,973,941, 758,726,694,662,630,415,383,1005,992,960,745,713,681,649,402,979,947,764,732, 700,668,636,421,389,1011,998,966,751,719,687,655,408,376,985,953,738,706,674, 642,395,1017,972,940,757,725,693,661,414,382,1004,991,959,744,712,680,648, 401,1023,978,946,763,731,699,667,420,388,1010,965,718,654,407,952,705,641, 394,1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,380,1002,989,742,678,976,729,665,418,963, 716,652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664, 417,962,715,651,404,949,766,702,638,391,1013,753,689,378,1000,987,740,676, 974,727,663,416,961,714,650,403,948,765,701,637,390,1012] [ns_server:info,2014-08-19T16:51:11.752,ns_1@10.242.238.90:<0.27098.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 627 to state replica [ns_server:debug,2014-08-19T16:51:11.784,ns_1@10.242.238.90:<0.27098.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_627_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.786,ns_1@10.242.238.90:<0.27098.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[627]}, {checkpoints,[{627,0}]}, {name,<<"replication_building_627_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[627]}, {takeover,false}, {suffix,"building_627_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",627,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:11.786,ns_1@10.242.238.90:<0.27098.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27099.0> [rebalance:debug,2014-08-19T16:51:11.786,ns_1@10.242.238.90:<0.27098.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:11.787,ns_1@10.242.238.90:<0.27098.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19336.1>,#Ref<16550.0.1.234370>}]} [rebalance:info,2014-08-19T16:51:11.787,ns_1@10.242.238.90:<0.27098.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 627 
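capi_set_view_manager reprints its full "Usable vbuckets" list on every change, so consecutive dumps in this stretch differ by a single id (378, 632, 630, 376, and so on). Rather than eyeballing thousand-element lists, a sketch (mine) that reduces consecutive dumps to their deltas:

    import re

    # Each "Usable vbuckets:" dump is a flat Erlang list of integers.
    DUMP = re.compile(r"Usable vbuckets:\s*\[([\d,\s]+)\]")

    def usable_deltas(log_text):
        """Yield (added, removed) sets between consecutive 'Usable vbuckets' dumps."""
        dumps = [{int(x) for x in re.findall(r"\d+", m.group(1))}
                 for m in DUMP.finditer(log_text)]
        for prev, cur in zip(dumps, dumps[1:]):
            yield cur - prev, prev - cur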
[rebalance:debug,2014-08-19T16:51:11.787,ns_1@10.242.238.90:<0.27098.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19336.1>,#Ref<16550.0.1.234370>}] [ns_server:debug,2014-08-19T16:51:11.788,ns_1@10.242.238.90:<0.27098.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:11.788,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27100.0> (ok) [rebalance:debug,2014-08-19T16:51:11.790,ns_1@10.242.238.90:<0.27101.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 627 [ns_server:info,2014-08-19T16:51:11.794,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 371 state to replica [views:debug,2014-08-19T16:51:11.798,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/376. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:11.799,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",376,replica,0} [ns_server:info,2014-08-19T16:51:11.799,ns_1@10.242.238.90:<0.27104.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 371 to state replica [ns_server:debug,2014-08-19T16:51:11.821,ns_1@10.242.238.90:<0.27104.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_371_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.823,ns_1@10.242.238.90:<0.27104.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[371]}, {checkpoints,[{371,0}]}, {name,<<"replication_building_371_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[371]}, {takeover,false}, {suffix,"building_371_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",371,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:11.823,ns_1@10.242.238.90:<0.27104.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27105.0> [rebalance:debug,2014-08-19T16:51:11.824,ns_1@10.242.238.90:<0.27104.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:11.824,ns_1@10.242.238.90:<0.27104.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19358.1>,#Ref<16550.0.1.234487>}]} [rebalance:info,2014-08-19T16:51:11.824,ns_1@10.242.238.90:<0.27104.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 371 [rebalance:debug,2014-08-19T16:51:11.825,ns_1@10.242.238.90:<0.27104.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19358.1>,#Ref<16550.0.1.234487>}] [ns_server:debug,2014-08-19T16:51:11.825,ns_1@10.242.238.90:<0.27104.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:11.842,ns_1@10.242.238.90:<0.27120.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 371 [ns_server:debug,2014-08-19T16:51:11.865,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 374. Nacking mccouch update. [views:debug,2014-08-19T16:51:11.865,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/374. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:11.865,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",374,replica,0} [ns_server:debug,2014-08-19T16:51:11.866,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,374,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956, 709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734,670,423,968, 721,657,410,955,708,644,397,1019,942,759,695,384,1006,993,746,682,980,733, 669,422,967,720,656,409,986,954,739,707,675,643,396,1018,973,941,758,726,694, 662,630,415,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668,636, 421,389,1011,998,966,751,719,687,655,408,376,985,953,738,706,674,642,395, 1017,972,940,757,725,693,661,414,382,1004,991,959,744,712,680,648,401,1023, 978,946,763,731,699,667,420,388,1010,965,718,654,407,952,705,641,394,1016, 939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406,951,704,640, 393,1015,938,755,691,380,1002,989,742,678,976,729,665,418,963,716,652,405, 950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664,417,962,715, 651,404,949,766,702,638,391,1013,753,689,378,1000,987,740,676,974,727,663, 416,961,714,650,403,948,765,701,637,390,1012,999,752,688] [ns_server:info,2014-08-19T16:51:11.898,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 626 state to replica [ns_server:info,2014-08-19T16:51:11.904,ns_1@10.242.238.90:<0.27123.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 626 to state replica [views:debug,2014-08-19T16:51:11.916,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/374. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:11.916,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",374,replica,0} [ns_server:debug,2014-08-19T16:51:11.936,ns_1@10.242.238.90:<0.27123.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_626_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.938,ns_1@10.242.238.90:<0.27123.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[626]}, {checkpoints,[{626,0}]}, {name,<<"replication_building_626_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[626]}, {takeover,false}, {suffix,"building_626_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",626,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:11.938,ns_1@10.242.238.90:<0.27123.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27124.0> [rebalance:debug,2014-08-19T16:51:11.938,ns_1@10.242.238.90:<0.27123.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:11.939,ns_1@10.242.238.90:<0.27123.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19399.1>,#Ref<16550.0.1.234701>}]} [rebalance:info,2014-08-19T16:51:11.939,ns_1@10.242.238.90:<0.27123.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 626 [rebalance:debug,2014-08-19T16:51:11.939,ns_1@10.242.238.90:<0.27123.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19399.1>,#Ref<16550.0.1.234701>}] [ns_server:debug,2014-08-19T16:51:11.940,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27125.0> (ok) [ns_server:debug,2014-08-19T16:51:11.940,ns_1@10.242.238.90:<0.27123.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:11.941,ns_1@10.242.238.90:<0.27126.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 626 [ns_server:info,2014-08-19T16:51:11.946,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 370 state to replica [ns_server:info,2014-08-19T16:51:11.950,ns_1@10.242.238.90:<0.27129.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 370 to state replica [ns_server:debug,2014-08-19T16:51:11.973,ns_1@10.242.238.90:<0.27129.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_370_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:11.974,ns_1@10.242.238.90:<0.27129.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[370]}, {checkpoints,[{370,0}]}, {name,<<"replication_building_370_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[370]}, {takeover,false}, {suffix,"building_370_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",370,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:11.975,ns_1@10.242.238.90:<0.27129.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27130.0> [rebalance:debug,2014-08-19T16:51:11.975,ns_1@10.242.238.90:<0.27129.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
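Every "Starting tap stream" entry above carries the same endpoint pair, {"10.242.238.88",11209} followed by {"10.242.238.90",11209}, which appear to be the upstream (source) and downstream sides of the replica build. A sketch (mine) that collects the distinct pairs, useful for checking a longer capture for streams pulling from other nodes:

    import re

    # The two {"host",port} tuples following the option list of each stream.
    ENDPOINTS = re.compile(
        r'Starting tap stream:.*?\{\{"([\d.]+)",(\d+)\},\s*\{"([\d.]+)",(\d+)\}',
        re.S,
    )

    def stream_endpoints(log_text):
        """Return the set of (upstream, downstream) host:port pairs seen."""
        return {("%s:%s" % (sh, sp), "%s:%s" % (dh, dp))
                for sh, sp, dh, dp in ENDPOINTS.findall(log_text)}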
[rebalance:debug,2014-08-19T16:51:11.975,ns_1@10.242.238.90:<0.27129.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19421.1>,#Ref<16550.0.1.234796>}]} [rebalance:info,2014-08-19T16:51:11.976,ns_1@10.242.238.90:<0.27129.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 370 [rebalance:debug,2014-08-19T16:51:11.976,ns_1@10.242.238.90:<0.27129.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19421.1>,#Ref<16550.0.1.234796>}] [ns_server:debug,2014-08-19T16:51:11.977,ns_1@10.242.238.90:<0.27129.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:11.990,ns_1@10.242.238.90:<0.27131.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 370 [ns_server:info,2014-08-19T16:51:12.044,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 625 state to replica [ns_server:info,2014-08-19T16:51:12.050,ns_1@10.242.238.90:<0.27148.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 625 to state replica [ns_server:debug,2014-08-19T16:51:12.084,ns_1@10.242.238.90:<0.27148.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_625_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:12.085,ns_1@10.242.238.90:<0.27148.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[625]}, {checkpoints,[{625,0}]}, {name,<<"replication_building_625_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[625]}, {takeover,false}, {suffix,"building_625_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",625,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:12.085,ns_1@10.242.238.90:<0.27148.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27149.0> [rebalance:debug,2014-08-19T16:51:12.086,ns_1@10.242.238.90:<0.27148.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:12.086,ns_1@10.242.238.90:<0.27148.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19476.1>,#Ref<16550.0.1.235056>}]} [rebalance:info,2014-08-19T16:51:12.086,ns_1@10.242.238.90:<0.27148.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 625 [rebalance:debug,2014-08-19T16:51:12.086,ns_1@10.242.238.90:<0.27148.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19476.1>,#Ref<16550.0.1.235056>}] [ns_server:debug,2014-08-19T16:51:12.087,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27150.0> (ok) [ns_server:debug,2014-08-19T16:51:12.087,ns_1@10.242.238.90:<0.27148.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:12.089,ns_1@10.242.238.90:<0.27151.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 625 [ns_server:info,2014-08-19T16:51:12.094,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 369 state to replica [ns_server:info,2014-08-19T16:51:12.098,ns_1@10.242.238.90:<0.27154.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 369 to state replica 
[ns_server:debug,2014-08-19T16:51:12.108,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 635. Nacking mccouch update. [views:debug,2014-08-19T16:51:12.108,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/635. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:12.108,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",635,pending,0} [ns_server:debug,2014-08-19T16:51:12.109,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,374,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,386,1008,995,748,684,982,735,671,424,969,722,658,411,956, 709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734,670,423,968, 721,657,410,955,708,644,397,1019,942,759,695,384,1006,993,746,682,980,733, 669,422,967,720,656,409,986,954,739,707,675,643,396,1018,973,941,758,726,694, 662,630,415,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668,636, 421,389,1011,998,966,751,719,687,655,408,376,985,953,738,706,674,642,395, 1017,972,940,757,725,693,661,414,382,1004,991,959,744,712,680,648,401,1023, 978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641,394, 1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406,951, 704,640,393,1015,938,755,691,380,1002,989,742,678,976,729,665,418,963,716, 652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664,417, 962,715,651,404,949,766,702,638,391,1013,753,689,378,1000,987,740,676,974, 727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688] [ns_server:debug,2014-08-19T16:51:12.120,ns_1@10.242.238.90:<0.27154.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_369_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:12.122,ns_1@10.242.238.90:<0.27154.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[369]}, {checkpoints,[{369,0}]}, {name,<<"replication_building_369_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[369]}, {takeover,false}, {suffix,"building_369_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",369,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:12.122,ns_1@10.242.238.90:<0.27154.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27155.0> [rebalance:debug,2014-08-19T16:51:12.122,ns_1@10.242.238.90:<0.27154.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:12.123,ns_1@10.242.238.90:<0.27154.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19498.1>,#Ref<16550.0.1.235189>}]} [rebalance:info,2014-08-19T16:51:12.123,ns_1@10.242.238.90:<0.27154.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 369 [rebalance:debug,2014-08-19T16:51:12.124,ns_1@10.242.238.90:<0.27154.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19498.1>,#Ref<16550.0.1.235189>}] [ns_server:debug,2014-08-19T16:51:12.124,ns_1@10.242.238.90:<0.27154.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
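The same {set_vbucket,Bucket,VBucket,State,Checkpoint} tuple is signalled more than once per vbucket in this window (each state change above produces two "Got set_vbucket event"/"Signaled mc_couch_event" pairs), so the last occurrence per vbucket is what determines its state here. A sketch (mine) that keeps only that last occurrence:

    import re

    # {set_vbucket,Bucket,VBucket,State,Checkpoint} as printed in the
    # "Signaled mc_couch_event" lines above.
    SET_VB = re.compile(r'\{set_vbucket,"([^"]+)",(\d+),(\w+),(\d+)\}')

    def final_states(log_text):
        """Return {(bucket, vbucket): state} using the last tuple seen per vbucket."""
        states = {}
        for bucket, vb, state, _checkpoint in SET_VB.findall(log_text):
            states[(bucket, int(vb))] = state
        return states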
[rebalance:debug,2014-08-19T16:51:12.139,ns_1@10.242.238.90:<0.27156.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 369 [views:debug,2014-08-19T16:51:12.175,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/635. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:12.175,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",635,pending,0} [ns_server:info,2014-08-19T16:51:12.198,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 624 state to replica [ns_server:info,2014-08-19T16:51:12.205,ns_1@10.242.238.90:<0.27159.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 624 to state replica [ns_server:debug,2014-08-19T16:51:12.236,ns_1@10.242.238.90:<0.27159.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_624_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:12.238,ns_1@10.242.238.90:<0.27159.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[624]}, {checkpoints,[{624,0}]}, {name,<<"replication_building_624_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[624]}, {takeover,false}, {suffix,"building_624_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",624,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:12.239,ns_1@10.242.238.90:<0.27159.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27160.0> [rebalance:debug,2014-08-19T16:51:12.239,ns_1@10.242.238.90:<0.27159.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:12.239,ns_1@10.242.238.90:<0.27159.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19553.1>,#Ref<16550.0.1.235425>}]} [rebalance:info,2014-08-19T16:51:12.239,ns_1@10.242.238.90:<0.27159.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 624 [rebalance:debug,2014-08-19T16:51:12.240,ns_1@10.242.238.90:<0.27159.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19553.1>,#Ref<16550.0.1.235425>}] [ns_server:debug,2014-08-19T16:51:12.240,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27161.0> (ok) [ns_server:debug,2014-08-19T16:51:12.241,ns_1@10.242.238.90:<0.27159.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:12.242,ns_1@10.242.238.90:<0.27162.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 624 [ns_server:info,2014-08-19T16:51:12.247,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 368 state to replica [ns_server:info,2014-08-19T16:51:12.251,ns_1@10.242.238.90:<0.27165.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 368 to state replica [ns_server:debug,2014-08-19T16:51:12.275,ns_1@10.242.238.90:<0.27165.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_368_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:12.276,ns_1@10.242.238.90:<0.27165.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[368]}, {checkpoints,[{368,0}]}, 
{name,<<"replication_building_368_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[368]}, {takeover,false}, {suffix,"building_368_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",368,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:12.277,ns_1@10.242.238.90:<0.27165.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27180.0> [rebalance:debug,2014-08-19T16:51:12.277,ns_1@10.242.238.90:<0.27165.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:12.277,ns_1@10.242.238.90:<0.27165.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19575.1>,#Ref<16550.0.1.235544>}]} [rebalance:info,2014-08-19T16:51:12.277,ns_1@10.242.238.90:<0.27165.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 368 [rebalance:debug,2014-08-19T16:51:12.278,ns_1@10.242.238.90:<0.27165.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19575.1>,#Ref<16550.0.1.235544>}] [ns_server:debug,2014-08-19T16:51:12.279,ns_1@10.242.238.90:<0.27165.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:12.293,ns_1@10.242.238.90:<0.27181.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 368 [ns_server:info,2014-08-19T16:51:12.347,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 623 state to replica [ns_server:debug,2014-08-19T16:51:12.351,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 633. Nacking mccouch update. [views:debug,2014-08-19T16:51:12.352,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/633. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:12.352,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",633,pending,0} [ns_server:debug,2014-08-19T16:51:12.352,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,374,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,633,386,1008,995,748,684,982,735,671,424,969,722,658,411, 956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734,670,423, 968,721,657,410,955,708,644,397,1019,942,759,695,384,1006,993,746,682,980, 733,669,422,967,720,656,409,986,954,739,707,675,643,396,1018,973,941,758,726, 694,662,630,415,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668, 636,421,389,1011,998,966,751,719,687,655,408,376,985,953,738,706,674,642,395, 1017,972,940,757,725,693,661,414,382,1004,991,959,744,712,680,648,401,1023, 978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641,394, 1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406,951, 704,640,393,1015,938,755,691,380,1002,989,742,678,976,729,665,418,963,716, 652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664,417, 962,715,651,404,949,766,702,638,391,1013,753,689,378,1000,987,740,676,974, 727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688] [ns_server:info,2014-08-19T16:51:12.353,ns_1@10.242.238.90:<0.27184.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 623 to state replica [ns_server:debug,2014-08-19T16:51:12.385,ns_1@10.242.238.90:<0.27184.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_623_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:12.387,ns_1@10.242.238.90:<0.27184.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[623]}, {checkpoints,[{623,0}]}, {name,<<"replication_building_623_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[623]}, {takeover,false}, {suffix,"building_623_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",623,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:12.388,ns_1@10.242.238.90:<0.27184.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27185.0> [rebalance:debug,2014-08-19T16:51:12.388,ns_1@10.242.238.90:<0.27184.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:12.388,ns_1@10.242.238.90:<0.27184.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19616.1>,#Ref<16550.0.1.235771>}]} [rebalance:info,2014-08-19T16:51:12.388,ns_1@10.242.238.90:<0.27184.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 623 [rebalance:debug,2014-08-19T16:51:12.389,ns_1@10.242.238.90:<0.27184.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19616.1>,#Ref<16550.0.1.235771>}] [ns_server:debug,2014-08-19T16:51:12.389,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27186.0> (ok) [ns_server:debug,2014-08-19T16:51:12.389,ns_1@10.242.238.90:<0.27184.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[rebalance:debug,2014-08-19T16:51:12.391,ns_1@10.242.238.90:<0.27187.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 623 [ns_server:info,2014-08-19T16:51:12.395,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 367 state to replica [views:debug,2014-08-19T16:51:12.395,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/633. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:12.396,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",633,pending,0} [ns_server:info,2014-08-19T16:51:12.399,ns_1@10.242.238.90:<0.27190.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 367 to state replica [ns_server:debug,2014-08-19T16:51:12.422,ns_1@10.242.238.90:<0.27190.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_367_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:12.423,ns_1@10.242.238.90:<0.27190.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[367]}, {checkpoints,[{367,0}]}, {name,<<"replication_building_367_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[367]}, {takeover,false}, {suffix,"building_367_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",367,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:12.424,ns_1@10.242.238.90:<0.27190.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27191.0> [rebalance:debug,2014-08-19T16:51:12.424,ns_1@10.242.238.90:<0.27190.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:12.425,ns_1@10.242.238.90:<0.27190.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19652.1>,#Ref<16550.0.1.235916>}]} [rebalance:info,2014-08-19T16:51:12.425,ns_1@10.242.238.90:<0.27190.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 367 [rebalance:debug,2014-08-19T16:51:12.425,ns_1@10.242.238.90:<0.27190.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19652.1>,#Ref<16550.0.1.235916>}] [ns_server:debug,2014-08-19T16:51:12.426,ns_1@10.242.238.90:<0.27190.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:12.439,ns_1@10.242.238.90:<0.27207.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 367 [ns_server:debug,2014-08-19T16:51:12.471,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 631. Nacking mccouch update. [views:debug,2014-08-19T16:51:12.471,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/631. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:12.471,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",631,pending,0} [ns_server:debug,2014-08-19T16:51:12.472,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,374,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,633,386,1008,995,748,684,982,735,671,424,969,722,658,411, 956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734,670,423, 968,721,657,410,955,708,644,397,1019,942,759,695,631,384,1006,993,746,682, 980,733,669,422,967,720,656,409,986,954,739,707,675,643,396,1018,973,941,758, 726,694,662,630,415,383,1005,992,960,745,713,681,649,402,979,947,764,732,700, 668,636,421,389,1011,998,966,751,719,687,655,408,376,985,953,738,706,674,642, 395,1017,972,940,757,725,693,661,414,382,1004,991,959,744,712,680,648,401, 1023,978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641, 394,1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,380,1002,989,742,678,976,729,665,418,963, 716,652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664, 417,962,715,651,404,949,766,702,638,391,1013,753,689,378,1000,987,740,676, 974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688] [ns_server:info,2014-08-19T16:51:12.493,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 622 state to replica [ns_server:info,2014-08-19T16:51:12.499,ns_1@10.242.238.90:<0.27210.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 622 to state replica [views:debug,2014-08-19T16:51:12.513,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/631. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:12.513,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",631,pending,0} [ns_server:debug,2014-08-19T16:51:12.532,ns_1@10.242.238.90:<0.27210.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_622_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:12.534,ns_1@10.242.238.90:<0.27210.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[622]}, {checkpoints,[{622,0}]}, {name,<<"replication_building_622_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[622]}, {takeover,false}, {suffix,"building_622_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",622,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:12.534,ns_1@10.242.238.90:<0.27210.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27211.0> [rebalance:debug,2014-08-19T16:51:12.535,ns_1@10.242.238.90:<0.27210.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:12.535,ns_1@10.242.238.90:<0.27210.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19693.1>,#Ref<16550.0.1.236129>}]} [rebalance:info,2014-08-19T16:51:12.535,ns_1@10.242.238.90:<0.27210.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 622 [rebalance:debug,2014-08-19T16:51:12.536,ns_1@10.242.238.90:<0.27210.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19693.1>,#Ref<16550.0.1.236129>}] [ns_server:debug,2014-08-19T16:51:12.536,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27212.0> (ok) [ns_server:debug,2014-08-19T16:51:12.537,ns_1@10.242.238.90:<0.27210.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:12.539,ns_1@10.242.238.90:<0.27213.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 622 [ns_server:info,2014-08-19T16:51:12.543,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 366 state to replica [ns_server:info,2014-08-19T16:51:12.549,ns_1@10.242.238.90:<0.27221.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 366 to state replica [ns_server:debug,2014-08-19T16:51:12.571,ns_1@10.242.238.90:<0.27221.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_366_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:12.573,ns_1@10.242.238.90:<0.27221.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[366]}, {checkpoints,[{366,0}]}, {name,<<"replication_building_366_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[366]}, {takeover,false}, {suffix,"building_366_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",366,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:12.573,ns_1@10.242.238.90:<0.27221.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27231.0> [rebalance:debug,2014-08-19T16:51:12.573,ns_1@10.242.238.90:<0.27221.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
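Each vbucket build runs in its own ebucketmigrator process (<0.27210.0> for 622 and <0.27221.0> for 366 just above), so the pid in the entry header is the natural key for following one build end to end. A sketch (mine) that maps migrator pids to the vbucket and state announced in their init line:

    import re

    # "<pid>:ebucketmigrator_srv:init:...]Setting {"host",port} vbucket N to state S"
    SETTING = re.compile(
        r"(<\d+\.\d+\.\d+>):ebucketmigrator_srv:init:\d+\]"
        r'Setting \{"[\d.]+",\d+\} vbucket (\d+) to state (\w+)'
    )

    def migrator_vbuckets(log_text):
        """Return {pid: (vbucket, state)} for every migrator init entry."""
        return {pid: (int(vb), state) for pid, vb, state in SETTING.findall(log_text)}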
[rebalance:debug,2014-08-19T16:51:12.574,ns_1@10.242.238.90:<0.27221.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19717.1>,#Ref<16550.0.1.236313>}]} [rebalance:info,2014-08-19T16:51:12.574,ns_1@10.242.238.90:<0.27221.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 366 [rebalance:debug,2014-08-19T16:51:12.574,ns_1@10.242.238.90:<0.27221.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19717.1>,#Ref<16550.0.1.236313>}] [ns_server:debug,2014-08-19T16:51:12.575,ns_1@10.242.238.90:<0.27221.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:12.588,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 629. Nacking mccouch update. [views:debug,2014-08-19T16:51:12.588,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/629. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:12.589,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",629,pending,0} [rebalance:debug,2014-08-19T16:51:12.589,ns_1@10.242.238.90:<0.27232.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 366 [ns_server:debug,2014-08-19T16:51:12.590,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,374,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,633,386,1008,995,748,684,982,735,671,424,969,722,658,411, 956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734,670,423, 968,721,657,410,955,708,644,397,1019,942,759,695,631,384,1006,993,746,682, 980,733,669,422,967,720,656,409,986,954,739,707,675,643,396,1018,973,941,758, 726,694,662,630,415,383,1005,992,960,745,713,681,649,402,979,947,764,732,700, 668,636,421,389,1011,998,966,751,719,687,655,408,376,985,953,738,706,674,642, 395,1017,972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648, 401,1023,978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705, 641,394,1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653, 406,951,704,640,393,1015,938,755,691,380,1002,989,742,678,976,729,665,418, 963,716,652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728, 664,417,962,715,651,404,949,766,702,638,391,1013,753,689,378,1000,987,740, 676,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688] [views:debug,2014-08-19T16:51:12.631,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/629. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:12.631,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",629,pending,0} [ns_server:info,2014-08-19T16:51:12.643,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 621 state to replica [ns_server:info,2014-08-19T16:51:12.651,ns_1@10.242.238.90:<0.27235.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 621 to state replica [ns_server:debug,2014-08-19T16:51:12.683,ns_1@10.242.238.90:<0.27235.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_621_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:12.684,ns_1@10.242.238.90:<0.27235.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[621]}, {checkpoints,[{621,0}]}, {name,<<"replication_building_621_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[621]}, {takeover,false}, {suffix,"building_621_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",621,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:12.685,ns_1@10.242.238.90:<0.27235.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27250.0> [rebalance:debug,2014-08-19T16:51:12.685,ns_1@10.242.238.90:<0.27235.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:12.686,ns_1@10.242.238.90:<0.27235.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19778.1>,#Ref<16550.0.1.238194>}]} [rebalance:info,2014-08-19T16:51:12.686,ns_1@10.242.238.90:<0.27235.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 621 [rebalance:debug,2014-08-19T16:51:12.686,ns_1@10.242.238.90:<0.27235.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19778.1>,#Ref<16550.0.1.238194>}] [ns_server:debug,2014-08-19T16:51:12.687,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27251.0> (ok) [ns_server:debug,2014-08-19T16:51:12.687,ns_1@10.242.238.90:<0.27235.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:12.688,ns_1@10.242.238.90:<0.27252.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 621 [ns_server:info,2014-08-19T16:51:12.694,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 365 state to replica [ns_server:info,2014-08-19T16:51:12.698,ns_1@10.242.238.90:<0.27255.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 365 to state replica [ns_server:debug,2014-08-19T16:51:12.714,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 627. Nacking mccouch update. [views:debug,2014-08-19T16:51:12.714,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/627. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:12.714,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",627,pending,0} [ns_server:debug,2014-08-19T16:51:12.715,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,374,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,633,386,1008,995,748,684,982,735,671,424,969,722,658,411, 956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734,670,423, 968,721,657,410,955,708,644,397,1019,942,759,695,631,384,1006,993,746,682, 980,733,669,422,967,720,656,409,954,707,643,396,1018,973,941,758,726,694,662, 630,415,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668,636,421, 389,1011,998,966,751,719,687,655,408,376,985,953,738,706,674,642,395,1017, 972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401,1023, 978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641,394, 1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406,951, 704,640,393,1015,938,755,691,627,380,1002,989,742,678,976,729,665,418,963, 716,652,405,950,767,703,639,392,1014,754,690,1001,988,741,677,975,728,664, 417,962,715,651,404,949,766,702,638,391,1013,753,689,378,1000,987,740,676, 974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,986,739, 675] [ns_server:debug,2014-08-19T16:51:12.721,ns_1@10.242.238.90:<0.27255.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_365_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:12.723,ns_1@10.242.238.90:<0.27255.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[365]}, {checkpoints,[{365,0}]}, {name,<<"replication_building_365_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[365]}, {takeover,false}, {suffix,"building_365_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",365,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:12.723,ns_1@10.242.238.90:<0.27255.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27256.0> [rebalance:debug,2014-08-19T16:51:12.723,ns_1@10.242.238.90:<0.27255.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:12.724,ns_1@10.242.238.90:<0.27255.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19810.1>,#Ref<16550.0.1.238557>}]} [rebalance:info,2014-08-19T16:51:12.724,ns_1@10.242.238.90:<0.27255.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 365 [rebalance:debug,2014-08-19T16:51:12.724,ns_1@10.242.238.90:<0.27255.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19810.1>,#Ref<16550.0.1.238557>}] [ns_server:debug,2014-08-19T16:51:12.726,ns_1@10.242.238.90:<0.27255.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:12.739,ns_1@10.242.238.90:<0.27258.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 365 [views:debug,2014-08-19T16:51:12.748,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/627. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:12.748,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",627,pending,0} [ns_server:info,2014-08-19T16:51:12.793,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 620 state to replica [ns_server:info,2014-08-19T16:51:12.799,ns_1@10.242.238.90:<0.27275.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 620 to state replica [ns_server:debug,2014-08-19T16:51:12.815,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 379. Nacking mccouch update. [views:debug,2014-08-19T16:51:12.815,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/379. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:12.815,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",379,replica,0} [ns_server:debug,2014-08-19T16:51:12.816,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,374,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,633,386,1008,995,748,684,982,735,671,424,969,722,658,411, 956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734,670,423, 968,721,657,410,955,708,644,397,1019,942,759,695,631,384,1006,993,746,682, 980,733,669,422,967,720,656,409,954,707,643,396,1018,973,941,758,726,694,662, 630,415,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668,636,421, 389,1011,998,966,751,719,687,655,408,376,985,953,738,706,674,642,395,1017, 972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401,1023, 978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641,394, 1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406,951, 704,640,393,1015,938,755,691,627,380,1002,989,742,678,976,729,665,418,963, 716,652,405,950,767,703,639,392,1014,754,690,379,1001,988,741,677,975,728, 664,417,962,715,651,404,949,766,702,638,391,1013,753,689,378,1000,987,740, 676,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,986, 739,675] [ns_server:debug,2014-08-19T16:51:12.832,ns_1@10.242.238.90:<0.27275.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_620_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:12.833,ns_1@10.242.238.90:<0.27275.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[620]}, {checkpoints,[{620,0}]}, {name,<<"replication_building_620_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[620]}, {takeover,false}, {suffix,"building_620_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",620,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:12.834,ns_1@10.242.238.90:<0.27275.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27276.0> [rebalance:debug,2014-08-19T16:51:12.834,ns_1@10.242.238.90:<0.27275.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:12.835,ns_1@10.242.238.90:<0.27275.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter 
{had_backfill,undefined,undefined,[{<16550.19867.1>,#Ref<16550.0.1.239161>}]} [rebalance:info,2014-08-19T16:51:12.835,ns_1@10.242.238.90:<0.27275.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 620 [rebalance:debug,2014-08-19T16:51:12.835,ns_1@10.242.238.90:<0.27275.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19867.1>,#Ref<16550.0.1.239161>}] [ns_server:debug,2014-08-19T16:51:12.836,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27277.0> (ok) [ns_server:debug,2014-08-19T16:51:12.836,ns_1@10.242.238.90:<0.27275.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:12.837,ns_1@10.242.238.90:<0.27278.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 620 [ns_server:info,2014-08-19T16:51:12.841,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 364 state to replica [ns_server:info,2014-08-19T16:51:12.845,ns_1@10.242.238.90:<0.27281.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 364 to state replica [views:debug,2014-08-19T16:51:12.850,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/379. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:12.850,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",379,replica,0} [ns_server:debug,2014-08-19T16:51:12.868,ns_1@10.242.238.90:<0.27281.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_364_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:12.869,ns_1@10.242.238.90:<0.27281.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[364]}, {checkpoints,[{364,0}]}, {name,<<"replication_building_364_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[364]}, {takeover,false}, {suffix,"building_364_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",364,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:12.870,ns_1@10.242.238.90:<0.27281.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27282.0> [rebalance:debug,2014-08-19T16:51:12.870,ns_1@10.242.238.90:<0.27281.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:12.870,ns_1@10.242.238.90:<0.27281.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.19897.1>,#Ref<16550.0.1.239458>}]} [rebalance:info,2014-08-19T16:51:12.870,ns_1@10.242.238.90:<0.27281.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 364 [rebalance:debug,2014-08-19T16:51:12.871,ns_1@10.242.238.90:<0.27281.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.19897.1>,#Ref<16550.0.1.239458>}] [ns_server:debug,2014-08-19T16:51:12.872,ns_1@10.242.238.90:<0.27281.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:12.894,ns_1@10.242.238.90:<0.27283.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 364 [ns_server:info,2014-08-19T16:51:12.950,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 619 state to 
replica [ns_server:info,2014-08-19T16:51:12.956,ns_1@10.242.238.90:<0.27300.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 619 to state replica [ns_server:debug,2014-08-19T16:51:12.988,ns_1@10.242.238.90:<0.27300.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_619_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:12.990,ns_1@10.242.238.90:<0.27300.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[619]}, {checkpoints,[{619,0}]}, {name,<<"replication_building_619_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[619]}, {takeover,false}, {suffix,"building_619_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",619,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:12.990,ns_1@10.242.238.90:<0.27300.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27301.0> [rebalance:debug,2014-08-19T16:51:12.990,ns_1@10.242.238.90:<0.27300.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:12.991,ns_1@10.242.238.90:<0.27300.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.20015.1>,#Ref<16550.0.1.240877>}]} [rebalance:info,2014-08-19T16:51:12.991,ns_1@10.242.238.90:<0.27300.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 619 [rebalance:debug,2014-08-19T16:51:12.991,ns_1@10.242.238.90:<0.27300.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.20015.1>,#Ref<16550.0.1.240877>}] [ns_server:debug,2014-08-19T16:51:12.992,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27302.0> (ok) [ns_server:debug,2014-08-19T16:51:12.992,ns_1@10.242.238.90:<0.27300.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:12.994,ns_1@10.242.238.90:<0.27303.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 619 [ns_server:info,2014-08-19T16:51:12.998,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 363 state to replica [ns_server:debug,2014-08-19T16:51:13.002,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 377. Nacking mccouch update. [views:debug,2014-08-19T16:51:13.002,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/377. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:13.003,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",377,replica,0} [ns_server:info,2014-08-19T16:51:13.003,ns_1@10.242.238.90:<0.27306.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 363 to state replica [ns_server:debug,2014-08-19T16:51:13.003,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762,698, 634,387,1009,996,749,685,374,983,736,672,425,970,723,659,412,957,710,646,399, 1021,944,761,697,633,386,1008,995,748,684,982,735,671,424,969,722,658,411, 956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734,670,423, 968,721,657,410,955,708,644,397,1019,942,759,695,631,384,1006,993,746,682, 980,733,669,422,967,720,656,409,954,707,643,396,1018,973,941,758,726,694,662, 630,415,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668,636,421, 389,1011,998,966,751,719,687,655,408,376,985,953,738,706,674,642,395,1017, 972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401,1023, 978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641,394, 1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406,951, 704,640,393,1015,938,755,691,627,380,1002,989,742,678,976,729,665,418,963, 716,652,405,950,767,703,639,392,1014,754,690,379,1001,988,741,677,975,728, 664,417,962,715,651,404,949,766,702,638,391,1013,753,689,378,1000,987,740, 676,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,377, 986,739,675] [ns_server:debug,2014-08-19T16:51:13.026,ns_1@10.242.238.90:<0.27306.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_363_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:13.028,ns_1@10.242.238.90:<0.27306.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[363]}, {checkpoints,[{363,0}]}, {name,<<"replication_building_363_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[363]}, {takeover,false}, {suffix,"building_363_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",363,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:13.028,ns_1@10.242.238.90:<0.27306.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27307.0> [rebalance:debug,2014-08-19T16:51:13.028,ns_1@10.242.238.90:<0.27306.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:13.029,ns_1@10.242.238.90:<0.27306.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.20045.1>,#Ref<16550.0.1.241181>}]} [rebalance:info,2014-08-19T16:51:13.029,ns_1@10.242.238.90:<0.27306.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 363 [rebalance:debug,2014-08-19T16:51:13.029,ns_1@10.242.238.90:<0.27306.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.20045.1>,#Ref<16550.0.1.241181>}] [ns_server:debug,2014-08-19T16:51:13.030,ns_1@10.242.238.90:<0.27306.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:13.046,ns_1@10.242.238.90:<0.27308.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 363 
[views:debug,2014-08-19T16:51:13.086,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/377. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:13.086,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",377,replica,0} [ns_server:debug,2014-08-19T16:51:13.236,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 375. Nacking mccouch update. [views:debug,2014-08-19T16:51:13.236,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/375. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:13.237,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",375,replica,0} [ns_server:debug,2014-08-19T16:51:13.237,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,374,983,736,672,425,970,723,659,412,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,982,735,671,424,969,722,658, 411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734,670, 423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384,1006,993,746, 682,980,733,669,422,967,720,656,409,954,707,643,396,1018,973,941,758,726,694, 662,630,415,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668,636, 421,389,1011,998,966,751,719,687,655,408,376,985,953,738,706,674,642,395, 1017,972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401, 1023,978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641, 394,1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,976,729,665,418, 963,716,652,405,950,767,703,639,392,1014,754,690,379,1001,988,741,677,975, 728,664,417,962,715,651,404,949,766,702,638,391,1013,753,689,378,1000,987, 740,676,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688, 377,986,739,675] [views:debug,2014-08-19T16:51:13.312,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/375. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:13.312,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",375,replica,0} [ns_server:debug,2014-08-19T16:51:13.454,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 373. Nacking mccouch update. [views:debug,2014-08-19T16:51:13.454,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/373. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:13.454,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",373,replica,0} [ns_server:debug,2014-08-19T16:51:13.455,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,374,983,736,672,425,970,723,659,412,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424,969,722, 658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734, 670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384,1006,993, 746,682,980,733,669,422,967,720,656,409,954,707,643,396,1018,973,941,758,726, 694,662,630,415,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668, 636,421,389,1011,998,966,751,719,687,655,408,376,985,953,738,706,674,642,395, 1017,972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401, 1023,978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641, 394,1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,976,729,665,418, 963,716,652,405,950,767,703,639,392,1014,754,690,379,1001,988,741,677,975, 728,664,417,962,715,651,404,949,766,702,638,391,1013,753,689,378,1000,987, 740,676,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688, 377,986,739,675] [views:debug,2014-08-19T16:51:13.521,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/373. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:13.521,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",373,replica,0} [ns_server:debug,2014-08-19T16:51:13.663,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 371. Nacking mccouch update. [views:debug,2014-08-19T16:51:13.663,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/371. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:13.663,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",371,replica,0} [ns_server:debug,2014-08-19T16:51:13.664,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,374,983,736,672,425,970,723,659,412,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424,969,722, 658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734, 670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384,1006,993, 746,682,371,980,733,669,422,967,720,656,409,954,707,643,396,1018,941,758,694, 630,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668,636,421,389, 1011,998,966,751,719,687,655,408,376,985,953,738,706,674,642,395,1017,972, 940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401,1023,978, 946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641,394,1016, 939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406,951,704,640, 393,1015,938,755,691,627,380,1002,989,742,678,976,729,665,418,963,716,652, 405,950,767,703,639,392,1014,754,690,379,1001,988,741,677,975,728,664,417, 962,715,651,404,949,766,702,638,391,1013,753,689,378,1000,987,740,676,974, 727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,377,986,739, 675,973,726,662,415] [views:debug,2014-08-19T16:51:13.731,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/371. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:13.731,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",371,replica,0} [ns_server:debug,2014-08-19T16:51:13.824,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 625. Nacking mccouch update. [views:debug,2014-08-19T16:51:13.824,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/625. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:13.824,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",625,pending,0} [ns_server:debug,2014-08-19T16:51:13.825,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,374,983,736,672,425,970,723,659,412,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424,969,722, 658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734, 670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384,1006,993, 746,682,371,980,733,669,422,967,720,656,409,954,707,643,396,1018,941,758,694, 630,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668,636,421,389, 1011,998,966,751,719,687,655,408,376,985,953,738,706,674,642,395,1017,972, 940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401,1023,978, 946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641,394,1016, 939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406,951,704,640, 393,1015,938,755,691,627,380,1002,989,742,678,976,729,665,418,963,716,652, 405,950,767,703,639,392,1014,754,690,379,1001,988,741,677,975,728,664,417, 962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676, 974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,377,986, 739,675,973,726,662,415] [views:debug,2014-08-19T16:51:13.867,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/625. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:13.867,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",625,pending,0} [ns_server:debug,2014-08-19T16:51:13.941,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 623. Nacking mccouch update. [views:debug,2014-08-19T16:51:13.942,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/623. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:13.942,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",623,pending,0} [ns_server:debug,2014-08-19T16:51:13.942,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,374,983,736,672,425,970,723,659,412,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424,969,722, 658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,981,734, 670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384,1006,993, 746,682,371,980,733,669,422,967,720,656,409,954,707,643,396,1018,941,758,694, 630,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668,636,421,389, 1011,998,966,751,719,687,655,623,408,376,985,953,738,706,674,642,395,1017, 972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401,1023, 978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641,394, 1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406,951, 704,640,393,1015,938,755,691,627,380,1002,989,742,678,976,729,665,418,963, 716,652,405,950,767,703,639,392,1014,754,690,379,1001,988,741,677,975,728, 664,417,962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987, 740,676,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688, 377,986,739,675,973,726,662,415] [views:debug,2014-08-19T16:51:13.985,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/623. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:13.985,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",623,pending,0} [ns_server:debug,2014-08-19T16:51:14.052,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 621. Nacking mccouch update. [views:debug,2014-08-19T16:51:14.052,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/621. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:14.052,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",621,pending,0} [ns_server:debug,2014-08-19T16:51:14.053,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957,710, 646,399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424,969, 722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,981, 734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384,1006, 993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396,1018,941,758, 694,630,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668,636,421, 389,1011,998,966,751,719,687,655,623,408,376,985,953,738,706,674,642,395, 1017,972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401, 1023,978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641, 394,1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,976,729,665,418, 963,716,652,405,950,767,703,639,392,1014,754,690,379,1001,988,741,677,975, 728,664,417,962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000, 987,740,676,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752, 688,377,986,739,675,973,726,662,415] [views:debug,2014-08-19T16:51:14.086,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/621. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:14.086,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",621,pending,0} [ns_server:debug,2014-08-19T16:51:14.169,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 619. Nacking mccouch update. [views:debug,2014-08-19T16:51:14.169,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/619. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:14.169,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",619,pending,0} [ns_server:debug,2014-08-19T16:51:14.170,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957,710, 646,399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424,969, 722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619, 981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396,1018, 941,758,694,630,383,1005,992,960,745,713,681,649,402,979,947,764,732,700,668, 636,421,389,1011,998,966,751,719,687,655,623,408,376,985,953,738,706,674,642, 395,1017,972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648, 401,1023,978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705, 641,394,1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653, 406,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,976,729,665, 418,963,716,652,405,950,767,703,639,392,1014,754,690,379,1001,988,741,677, 975,728,664,417,962,715,651,404,949,766,702,638,391,1013,753,689,625,378, 1000,987,740,676,974,727,663,416,961,714,650,403,948,765,701,637,390,1012, 999,752,688,377,986,739,675,973,726,662,415] [views:debug,2014-08-19T16:51:14.203,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/619. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:14.203,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",619,pending,0} [ns_server:debug,2014-08-19T16:51:14.280,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 369. Nacking mccouch update. [views:debug,2014-08-19T16:51:14.280,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/369. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:14.280,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",369,replica,0} [ns_server:debug,2014-08-19T16:51:14.281,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957,710, 646,399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424,969, 722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619, 981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396,1018, 941,758,694,630,383,1005,992,745,681,979,947,764,732,700,668,636,421,389, 1011,998,966,751,719,687,655,623,408,376,985,953,738,706,674,642,395,1017, 972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401,369, 1023,978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641, 394,1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,976,729,665,418, 963,716,652,405,950,767,703,639,392,1014,754,690,379,1001,988,741,677,975, 728,664,417,962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000, 987,740,676,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752, 688,377,986,739,675,973,726,662,415,960,713,649,402] [views:debug,2014-08-19T16:51:14.347,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/369. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:14.347,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",369,replica,0} [ns_server:debug,2014-08-19T16:51:14.489,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 367. Nacking mccouch update. [views:debug,2014-08-19T16:51:14.489,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/367. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:14.489,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",367,replica,0} [ns_server:debug,2014-08-19T16:51:14.490,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957,710, 646,399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424,969, 722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619, 981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396,1018, 941,758,694,630,383,1005,992,745,681,979,947,764,732,700,668,636,421,389, 1011,998,966,751,719,687,655,623,408,376,985,953,738,706,674,642,395,1017, 972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401,369, 1023,978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641, 394,1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,367,976,729,665, 418,963,716,652,405,950,767,703,639,392,1014,754,690,379,1001,988,741,677, 975,728,664,417,962,715,651,404,949,766,702,638,391,1013,753,689,625,378, 1000,987,740,676,974,727,663,416,961,714,650,403,948,765,701,637,390,1012, 999,752,688,377,986,739,675,973,726,662,415,960,713,649,402] [views:debug,2014-08-19T16:51:14.556,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/367. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:14.556,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",367,replica,0} [ns_server:debug,2014-08-19T16:51:14.698,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 365. Nacking mccouch update. [views:debug,2014-08-19T16:51:14.698,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/365. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:14.699,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",365,replica,0} [ns_server:debug,2014-08-19T16:51:14.699,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957,710, 646,399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424,969, 722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619, 981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396,1018, 941,758,694,630,383,1005,992,745,681,979,947,764,732,700,668,636,421,389, 1011,998,966,751,719,687,655,623,408,376,985,953,738,706,674,642,395,1017, 972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401,369, 1023,978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705,641, 394,1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653,406, 951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,367,976,729,665, 418,963,716,652,405,950,767,703,639,392,1014,754,690,379,1001,988,741,677, 975,728,664,417,962,715,651,404,949,766,702,638,391,1013,753,689,625,378, 1000,987,740,676,365,974,727,663,416,961,714,650,403,948,765,701,637,390, 1012,999,752,688,377,986,739,675,973,726,662,415,960,713,649,402] [views:debug,2014-08-19T16:51:14.774,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/365. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:14.774,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",365,replica,0} [ns_server:debug,2014-08-19T16:51:14.933,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 363. Nacking mccouch update. [views:debug,2014-08-19T16:51:14.933,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/363. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:14.933,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",363,replica,0} [ns_server:debug,2014-08-19T16:51:14.933,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957,710, 646,399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424,969, 722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619, 981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396,1018, 941,758,694,630,383,1005,992,745,681,979,947,764,732,700,668,636,421,389, 1011,998,966,751,719,687,655,623,408,376,985,953,738,706,674,642,395,363, 1017,972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401, 369,1023,978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705, 641,394,1016,939,756,692,381,1003,990,743,679,977,730,666,419,964,717,653, 406,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,367,976,729, 665,418,963,716,652,405,950,767,703,639,392,1014,754,690,379,1001,988,741, 677,975,728,664,417,962,715,651,404,949,766,702,638,391,1013,753,689,625,378, 1000,987,740,676,365,974,727,663,416,961,714,650,403,948,765,701,637,390, 1012,999,752,688,377,986,739,675,973,726,662,415,960,713,649,402] [views:debug,2014-08-19T16:51:15.009,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/363. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:15.009,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",363,replica,0} [ns_server:debug,2014-08-19T16:51:15.192,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 628. Nacking mccouch update. [views:debug,2014-08-19T16:51:15.192,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/628. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:15.192,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",628,pending,0} [ns_server:debug,2014-08-19T16:51:15.193,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957,710, 646,399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424,969, 722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619, 981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396,1018, 941,758,694,630,383,1005,992,745,681,979,947,764,732,700,668,636,421,389, 1011,998,966,751,719,687,655,623,408,376,985,953,738,706,674,642,395,363, 1017,972,940,757,725,693,661,629,414,382,1004,991,959,744,712,680,648,401, 369,1023,978,946,763,731,699,667,635,420,388,1010,965,718,654,407,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,977,730,666,419,964,717, 653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,367,976, 729,665,418,963,716,652,405,950,767,703,639,392,1014,754,690,379,1001,988, 741,677,975,728,664,417,962,715,651,404,949,766,702,638,391,1013,753,689,625, 378,1000,987,740,676,365,974,727,663,416,961,714,650,403,948,765,701,637,390, 1012,999,752,688,377,986,739,675,973,726,662,415,960,713,649,402] [views:debug,2014-08-19T16:51:15.254,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/628. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:15.254,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",628,pending,0} [ns_server:debug,2014-08-19T16:51:15.329,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 626. Nacking mccouch update. [views:debug,2014-08-19T16:51:15.329,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/626. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:15.329,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",626,pending,0} [ns_server:debug,2014-08-19T16:51:15.330,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957,710, 646,399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424,969, 722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619, 981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396,1018, 941,758,694,630,383,1005,992,745,681,979,732,668,421,998,966,751,719,687,655, 623,408,376,985,953,738,706,674,642,395,363,1017,972,940,757,725,693,661,629, 414,382,1004,991,959,744,712,680,648,401,369,1023,978,946,763,731,699,667, 635,420,388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381, 1003,990,743,679,977,730,666,419,964,717,653,406,951,704,640,393,1015,938, 755,691,627,380,1002,989,742,678,367,976,729,665,418,963,716,652,405,950,767, 703,639,392,1014,754,690,626,379,1001,988,741,677,975,728,664,417,962,715, 651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,365,974, 727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,377,986,739, 675,973,726,662,415,960,713,649,402,947,764,700,636,389,1011] [views:debug,2014-08-19T16:51:15.363,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/626. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:15.363,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",626,pending,0} [ns_server:debug,2014-08-19T16:51:15.438,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 624. Nacking mccouch update. [views:debug,2014-08-19T16:51:15.439,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/624. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:15.439,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",624,pending,0} [ns_server:debug,2014-08-19T16:51:15.439,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957,710, 646,399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424,969, 722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619, 981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396,1018, 941,758,694,630,383,1005,992,745,681,979,732,668,421,998,966,751,719,687,655, 623,408,376,985,953,738,706,674,642,395,363,1017,972,940,757,725,693,661,629, 414,382,1004,991,959,744,712,680,648,401,369,1023,978,946,763,731,699,667, 635,420,388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381, 1003,990,743,679,977,730,666,419,964,717,653,406,951,704,640,393,1015,938, 755,691,627,380,1002,989,742,678,367,976,729,665,418,963,716,652,405,950,767, 703,639,392,1014,754,690,626,379,1001,988,741,677,975,728,664,417,962,715, 651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,365,974, 727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986, 739,675,973,726,662,415,960,713,649,402,947,764,700,636,389,1011] [views:debug,2014-08-19T16:51:15.481,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/624. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:15.481,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",624,pending,0} [ns_server:debug,2014-08-19T16:51:15.548,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 622. Nacking mccouch update. [views:debug,2014-08-19T16:51:15.548,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/622. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:15.548,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",622,pending,0} [ns_server:debug,2014-08-19T16:51:15.548,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945, 762,698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957, 710,646,399,1021,944,761,697,633,386,1008,995,748,684,373,982,735,671,424, 969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683, 619,981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396,1018, 941,758,694,630,383,1005,992,745,681,979,732,668,421,998,966,751,719,687,655, 623,408,376,985,953,738,706,674,642,395,363,1017,972,940,757,725,693,661,629, 414,382,1004,991,959,744,712,680,648,401,369,1023,978,946,763,731,699,667, 635,420,388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381, 1003,990,743,679,977,730,666,419,964,717,653,406,951,704,640,393,1015,938, 755,691,627,380,1002,989,742,678,367,976,729,665,418,963,716,652,405,950,767, 703,639,392,1014,754,690,626,379,1001,988,741,677,975,728,664,417,962,715, 651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,365,974, 727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986, 739,675,973,726,662,415,960,713,649,402,947,764,700,636,389,1011] [views:debug,2014-08-19T16:51:15.582,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/622. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:15.582,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",622,pending,0} [ns_server:debug,2014-08-19T16:51:15.665,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 620. Nacking mccouch update. [views:debug,2014-08-19T16:51:15.665,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/620. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:15.666,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",620,pending,0} [ns_server:debug,2014-08-19T16:51:15.666,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945, 762,698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957, 710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671, 424,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695,631, 384,1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396, 1018,941,758,694,630,383,1005,992,745,681,979,732,668,421,998,966,751,719, 687,655,623,408,376,985,953,738,706,674,642,395,363,1017,972,940,757,725,693, 661,629,414,382,1004,991,959,744,712,680,648,401,369,1023,978,946,763,731, 699,667,635,420,388,1010,965,718,654,407,952,705,641,394,1016,939,756,692, 628,381,1003,990,743,679,977,730,666,419,964,717,653,406,951,704,640,393, 1015,938,755,691,627,380,1002,989,742,678,367,976,729,665,418,963,716,652, 405,950,767,703,639,392,1014,754,690,626,379,1001,988,741,677,975,728,664, 417,962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740, 676,365,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688, 624,377,986,739,675,973,726,662,415,960,713,649,402,947,764,700,636,389,1011] [views:debug,2014-08-19T16:51:15.708,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/620. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:15.708,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",620,pending,0} [ns_server:debug,2014-08-19T16:51:15.834,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 372. Nacking mccouch update. [views:debug,2014-08-19T16:51:15.834,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/372. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:15.834,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",372,replica,0} [ns_server:debug,2014-08-19T16:51:15.835,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945, 762,698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957, 710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671, 424,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695, 631,384,1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396, 1018,941,758,694,630,383,1005,992,745,681,979,732,668,421,998,966,751,719, 687,655,623,408,376,985,953,738,706,674,642,395,363,1017,972,940,757,725,693, 661,629,414,382,1004,991,959,744,712,680,648,401,369,1023,978,946,763,731, 699,667,635,420,388,1010,965,718,654,407,952,705,641,394,1016,939,756,692, 628,381,1003,990,743,679,977,730,666,419,964,717,653,406,951,704,640,393, 1015,938,755,691,627,380,1002,989,742,678,367,976,729,665,418,963,716,652, 405,950,767,703,639,392,1014,754,690,626,379,1001,988,741,677,975,728,664, 417,962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740, 676,365,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688, 624,377,986,739,675,973,726,662,415,960,713,649,402,947,764,700,636,389,1011] [views:debug,2014-08-19T16:51:15.901,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/372. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:15.902,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",372,replica,0} [ns_server:debug,2014-08-19T16:51:16.036,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 370. Nacking mccouch update. [views:debug,2014-08-19T16:51:16.036,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/370. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:16.036,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",370,replica,0} [ns_server:debug,2014-08-19T16:51:16.037,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945, 762,698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957, 710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671, 424,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695, 631,384,1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396, 1018,941,758,694,630,383,1005,992,745,681,370,979,732,668,421,966,719,655, 408,985,953,738,706,674,642,395,363,1017,972,940,757,725,693,661,629,414,382, 1004,991,959,744,712,680,648,401,369,1023,978,946,763,731,699,667,635,420, 388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003,990, 743,679,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755,691,627, 380,1002,989,742,678,367,976,729,665,418,963,716,652,405,950,767,703,639,392, 1014,754,690,626,379,1001,988,741,677,975,728,664,417,962,715,651,404,949, 766,702,638,391,1013,753,689,625,378,1000,987,740,676,365,974,727,663,416, 961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739,675,973, 726,662,415,960,713,649,402,947,764,700,636,389,1011,998,751,687,623,376] [views:debug,2014-08-19T16:51:16.103,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/370. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:16.103,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",370,replica,0} [ns_server:debug,2014-08-19T16:51:16.279,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 368. Nacking mccouch update. [views:debug,2014-08-19T16:51:16.279,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/368. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:16.279,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",368,replica,0} [ns_server:debug,2014-08-19T16:51:16.280,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945, 762,698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957, 710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671, 424,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695, 631,384,1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396, 1018,941,758,694,630,383,1005,992,745,681,370,979,732,668,421,966,719,655, 408,985,953,738,706,674,642,395,363,1017,972,940,757,725,693,661,629,414,382, 1004,991,959,744,712,680,648,401,369,1023,978,946,763,731,699,667,635,420, 388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003,990, 743,679,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755,691, 627,380,1002,989,742,678,367,976,729,665,418,963,716,652,405,950,767,703,639, 392,1014,754,690,626,379,1001,988,741,677,975,728,664,417,962,715,651,404, 949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,365,974,727,663, 416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739,675, 973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998,751,687,623,376] [views:debug,2014-08-19T16:51:16.346,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/368. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:16.346,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",368,replica,0} [ns_server:debug,2014-08-19T16:51:16.488,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 366. Nacking mccouch update. [views:debug,2014-08-19T16:51:16.488,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/366. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:16.488,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",366,replica,0} [ns_server:debug,2014-08-19T16:51:16.488,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945, 762,698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957, 710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671, 424,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695, 631,384,1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396, 1018,941,758,694,630,383,1005,992,745,681,370,979,732,668,421,966,719,655, 408,985,953,738,706,674,642,395,363,1017,972,940,757,725,693,661,629,414,382, 1004,991,959,744,712,680,648,401,369,1023,978,946,763,731,699,667,635,420, 388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003,990, 743,679,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755,691, 627,380,1002,989,742,678,367,976,729,665,418,963,716,652,405,950,767,703,639, 392,1014,754,690,626,379,1001,988,741,677,366,975,728,664,417,962,715,651, 404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,365,974,727, 663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739, 675,973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998,751,687,623, 376] [views:debug,2014-08-19T16:51:16.555,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/366. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:16.555,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",366,replica,0} [ns_server:debug,2014-08-19T16:51:16.653,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 364. Nacking mccouch update. [views:debug,2014-08-19T16:51:16.653,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/364. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:16.653,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",364,replica,0} [ns_server:debug,2014-08-19T16:51:16.654,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945, 762,698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957, 710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671, 424,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695, 631,384,1006,993,746,682,371,980,733,669,422,967,720,656,409,954,707,643,396, 1018,941,758,694,630,383,1005,992,745,681,370,979,732,668,421,966,719,655, 408,985,953,738,706,674,642,395,363,1017,972,940,757,725,693,661,629,414,382, 1004,991,959,744,712,680,648,401,369,1023,978,946,763,731,699,667,635,420, 388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003,990, 743,679,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755,691, 627,380,1002,989,742,678,367,976,729,665,418,963,716,652,405,950,767,703,639, 392,1014,754,690,626,379,1001,988,741,677,366,975,728,664,417,962,715,651, 404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,365,974,727, 663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739, 675,364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998,751,687, 623,376] [views:debug,2014-08-19T16:51:16.688,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/364. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:16.688,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",364,replica,0} [rebalance:debug,2014-08-19T16:51:16.690,ns_1@10.242.238.90:<0.27232.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:16.690,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27232.0> (ok) [rebalance:debug,2014-08-19T16:51:16.723,ns_1@10.242.238.90:<0.27181.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:16.723,ns_1@10.242.238.90:<0.27308.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:16.723,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27181.0> (ok) [ns_server:debug,2014-08-19T16:51:16.723,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27308.0> (ok) [rebalance:debug,2014-08-19T16:51:16.797,ns_1@10.242.238.90:<0.27131.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:16.797,ns_1@10.242.238.90:<0.27258.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:16.797,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27131.0> (ok) [ns_server:debug,2014-08-19T16:51:16.797,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27258.0> (ok) [rebalance:debug,2014-08-19T16:51:16.864,ns_1@10.242.238.90:<0.27081.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:51:16.864,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27081.0> (ok) [rebalance:debug,2014-08-19T16:51:16.864,ns_1@10.242.238.90:<0.27207.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:16.864,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27207.0> (ok) [rebalance:debug,2014-08-19T16:51:16.931,ns_1@10.242.238.90:<0.27156.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:16.931,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27156.0> (ok) [rebalance:debug,2014-08-19T16:51:16.931,ns_1@10.242.238.90:<0.27031.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:16.931,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27031.0> (ok) [rebalance:debug,2014-08-19T16:51:16.999,ns_1@10.242.238.90:<0.26975.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:16.999,ns_1@10.242.238.90:<0.27120.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:17.000,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26975.0> (ok) [ns_server:debug,2014-08-19T16:51:17.000,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27120.0> (ok) [rebalance:debug,2014-08-19T16:51:17.092,ns_1@10.242.238.90:<0.26925.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:17.092,ns_1@10.242.238.90:<0.27056.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:17.092,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26925.0> (ok) [ns_server:debug,2014-08-19T16:51:17.092,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27056.0> (ok) [rebalance:debug,2014-08-19T16:51:17.217,ns_1@10.242.238.90:<0.27000.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:17.217,ns_1@10.242.238.90:<0.26875.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:17.217,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27000.0> (ok) [ns_server:debug,2014-08-19T16:51:17.217,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26875.0> (ok) [rebalance:debug,2014-08-19T16:51:17.223,ns_1@10.242.238.90:<0.27628.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 380 [rebalance:debug,2014-08-19T16:51:17.370,ns_1@10.242.238.90:<0.26825.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:17.370,ns_1@10.242.238.90:<0.26950.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:17.370,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26825.0> (ok) [ns_server:debug,2014-08-19T16:51:17.370,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26950.0> (ok) [rebalance:debug,2014-08-19T16:51:17.495,ns_1@10.242.238.90:<0.27278.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:51:17.496,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27278.0> (ok) [rebalance:debug,2014-08-19T16:51:17.496,ns_1@10.242.238.90:<0.26900.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:17.496,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26900.0> (ok) [rebalance:debug,2014-08-19T16:51:17.637,ns_1@10.242.238.90:<0.27213.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:17.638,ns_1@10.242.238.90:<0.26850.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:17.638,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27213.0> (ok) [ns_server:debug,2014-08-19T16:51:17.638,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26850.0> (ok) [rebalance:debug,2014-08-19T16:51:17.755,ns_1@10.242.238.90:<0.27162.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:17.755,ns_1@10.242.238.90:<0.27303.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:17.755,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27162.0> (ok) [ns_server:debug,2014-08-19T16:51:17.755,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27303.0> (ok) [rebalance:debug,2014-08-19T16:51:17.897,ns_1@10.242.238.90:<0.27126.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:17.897,ns_1@10.242.238.90:<0.27252.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:17.897,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27126.0> (ok) [ns_server:debug,2014-08-19T16:51:17.897,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27252.0> (ok) [rebalance:debug,2014-08-19T16:51:18.031,ns_1@10.242.238.90:<0.27187.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:18.031,ns_1@10.242.238.90:<0.27076.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:18.031,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27187.0> (ok) [ns_server:debug,2014-08-19T16:51:18.031,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27076.0> (ok) [rebalance:debug,2014-08-19T16:51:18.164,ns_1@10.242.238.90:<0.27151.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:18.164,ns_1@10.242.238.90:<0.27006.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:18.165,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27151.0> (ok) [ns_server:debug,2014-08-19T16:51:18.165,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27006.0> (ok) [rebalance:debug,2014-08-19T16:51:18.284,ns_1@10.242.238.90:<0.26970.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:18.284,ns_1@10.242.238.90:<0.27101.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:51:18.284,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26970.0> (ok) [ns_server:debug,2014-08-19T16:51:18.284,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27101.0> (ok) [rebalance:debug,2014-08-19T16:51:18.360,ns_1@10.242.238.90:<0.26920.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:18.360,ns_1@10.242.238.90:<0.27051.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:18.360,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26920.0> (ok) [ns_server:debug,2014-08-19T16:51:18.360,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27051.0> (ok) [rebalance:debug,2014-08-19T16:51:18.427,ns_1@10.242.238.90:<0.26870.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:18.427,ns_1@10.242.238.90:<0.26995.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:18.427,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26870.0> (ok) [ns_server:debug,2014-08-19T16:51:18.427,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26995.0> (ok) [rebalance:debug,2014-08-19T16:51:18.494,ns_1@10.242.238.90:<0.26820.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:18.494,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26820.0> (ok) [rebalance:debug,2014-08-19T16:51:18.494,ns_1@10.242.238.90:<0.26945.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:18.494,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26945.0> (ok) [rebalance:debug,2014-08-19T16:51:18.578,ns_1@10.242.238.90:<0.26895.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:18.578,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26895.0> (ok) [rebalance:debug,2014-08-19T16:51:18.628,ns_1@10.242.238.90:<0.26845.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:18.628,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.26845.0> (ok) [rebalance:debug,2014-08-19T16:51:18.679,ns_1@10.242.238.90:<0.27283.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:18.679,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27283.0> (ok) [rebalance:debug,2014-08-19T16:51:18.712,ns_1@10.242.238.90:<0.27628.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:18.712,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27628.0> (ok) [rebalance:debug,2014-08-19T16:51:19.002,ns_1@10.242.238.90:<0.27649.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 381 [rebalance:debug,2014-08-19T16:51:19.002,ns_1@10.242.238.90:<0.27652.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 382 
[rebalance:debug,2014-08-19T16:51:19.003,ns_1@10.242.238.90:<0.27652.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:19.003,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27652.0> (ok) [rebalance:debug,2014-08-19T16:51:19.004,ns_1@10.242.238.90:<0.27649.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:19.004,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27649.0> (ok) [rebalance:debug,2014-08-19T16:51:21.036,ns_1@10.242.238.90:<0.27661.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 634 [rebalance:debug,2014-08-19T16:51:21.036,ns_1@10.242.238.90:<0.27664.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 383 [rebalance:debug,2014-08-19T16:51:21.037,ns_1@10.242.238.90:<0.27661.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:21.037,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27661.0> (ok) [rebalance:debug,2014-08-19T16:51:21.038,ns_1@10.242.238.90:<0.27664.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:21.038,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27664.0> (ok) [rebalance:debug,2014-08-19T16:51:21.100,ns_1@10.242.238.90:<0.27667.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 635 [rebalance:debug,2014-08-19T16:51:21.100,ns_1@10.242.238.90:<0.27670.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 636 [rebalance:debug,2014-08-19T16:51:21.101,ns_1@10.242.238.90:<0.27670.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:21.101,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27670.0> (ok) [rebalance:debug,2014-08-19T16:51:21.102,ns_1@10.242.238.90:<0.27667.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:21.102,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27667.0> (ok) [rebalance:debug,2014-08-19T16:51:21.222,ns_1@10.242.238.90:<0.27673.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 637 [rebalance:debug,2014-08-19T16:51:21.223,ns_1@10.242.238.90:<0.27676.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 638 [rebalance:debug,2014-08-19T16:51:21.224,ns_1@10.242.238.90:<0.27676.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:21.224,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27676.0> (ok) [rebalance:debug,2014-08-19T16:51:21.224,ns_1@10.242.238.90:<0.27673.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:21.224,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27673.0> (ok) [rebalance:debug,2014-08-19T16:51:21.372,ns_1@10.242.238.90:<0.27685.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 639 [rebalance:debug,2014-08-19T16:51:21.374,ns_1@10.242.238.90:<0.27685.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:51:21.374,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27685.0> (ok) [rebalance:debug,2014-08-19T16:51:21.840,ns_1@10.242.238.90:<0.27689.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 364 [rebalance:debug,2014-08-19T16:51:21.841,ns_1@10.242.238.90:<0.27689.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:21.841,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27689.0> (ok) [rebalance:debug,2014-08-19T16:51:21.906,ns_1@10.242.238.90:<0.27692.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 366 [rebalance:debug,2014-08-19T16:51:21.908,ns_1@10.242.238.90:<0.27692.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:21.908,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27692.0> (ok) [rebalance:debug,2014-08-19T16:51:21.983,ns_1@10.242.238.90:<0.27695.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 368 [rebalance:debug,2014-08-19T16:51:21.983,ns_1@10.242.238.90:<0.27698.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 363 [rebalance:debug,2014-08-19T16:51:21.984,ns_1@10.242.238.90:<0.27695.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:21.984,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27695.0> (ok) [rebalance:debug,2014-08-19T16:51:21.985,ns_1@10.242.238.90:<0.27698.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:21.985,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27698.0> (ok) [rebalance:debug,2014-08-19T16:51:22.117,ns_1@10.242.238.90:<0.27701.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 370 [rebalance:debug,2014-08-19T16:51:22.117,ns_1@10.242.238.90:<0.27704.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 365 [rebalance:debug,2014-08-19T16:51:22.118,ns_1@10.242.238.90:<0.27701.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.118,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27701.0> (ok) [rebalance:debug,2014-08-19T16:51:22.118,ns_1@10.242.238.90:<0.27704.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.118,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27704.0> (ok) [rebalance:debug,2014-08-19T16:51:22.249,ns_1@10.242.238.90:<0.27707.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 372 [rebalance:debug,2014-08-19T16:51:22.249,ns_1@10.242.238.90:<0.27710.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 367 [rebalance:debug,2014-08-19T16:51:22.250,ns_1@10.242.238.90:<0.27707.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.250,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27707.0> (ok) [rebalance:debug,2014-08-19T16:51:22.250,ns_1@10.242.238.90:<0.27710.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:51:22.251,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27710.0> (ok) [rebalance:debug,2014-08-19T16:51:22.316,ns_1@10.242.238.90:<0.27713.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 374 [rebalance:debug,2014-08-19T16:51:22.316,ns_1@10.242.238.90:<0.27716.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 369 [rebalance:debug,2014-08-19T16:51:22.317,ns_1@10.242.238.90:<0.27713.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.317,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27713.0> (ok) [rebalance:debug,2014-08-19T16:51:22.317,ns_1@10.242.238.90:<0.27716.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.317,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27716.0> (ok) [rebalance:debug,2014-08-19T16:51:22.383,ns_1@10.242.238.90:<0.27719.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 371 [rebalance:debug,2014-08-19T16:51:22.383,ns_1@10.242.238.90:<0.27722.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 376 [rebalance:debug,2014-08-19T16:51:22.384,ns_1@10.242.238.90:<0.27722.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.384,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27722.0> (ok) [rebalance:debug,2014-08-19T16:51:22.385,ns_1@10.242.238.90:<0.27719.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.385,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27719.0> (ok) [rebalance:debug,2014-08-19T16:51:22.450,ns_1@10.242.238.90:<0.27725.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 373 [rebalance:debug,2014-08-19T16:51:22.450,ns_1@10.242.238.90:<0.27728.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 378 [rebalance:debug,2014-08-19T16:51:22.451,ns_1@10.242.238.90:<0.27728.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.451,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27728.0> (ok) [rebalance:debug,2014-08-19T16:51:22.452,ns_1@10.242.238.90:<0.27725.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.452,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27725.0> (ok) [rebalance:debug,2014-08-19T16:51:22.516,ns_1@10.242.238.90:<0.27731.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 375 [rebalance:debug,2014-08-19T16:51:22.518,ns_1@10.242.238.90:<0.27731.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.518,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27731.0> (ok) [rebalance:debug,2014-08-19T16:51:22.550,ns_1@10.242.238.90:<0.27734.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 377 [rebalance:debug,2014-08-19T16:51:22.551,ns_1@10.242.238.90:<0.27734.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:51:22.551,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27734.0> (ok) [rebalance:debug,2014-08-19T16:51:22.556,ns_1@10.242.238.90:<0.26873.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:22.556,ns_1@10.242.238.90:<0.26873.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:22.557,ns_1@10.242.238.90:<0.27737.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:22.557,ns_1@10.242.238.90:<0.27737.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:22.557,ns_1@10.242.238.90:<0.26873.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:22.564,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 380 state to replica [ns_server:info,2014-08-19T16:51:22.564,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [380,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426] ([380], []) [ns_server:debug,2014-08-19T16:51:22.566,ns_1@10.242.238.90:<0.27738.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [380,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.51338>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[380,384,385,386,387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:22.566,ns_1@10.242.238.90:<0.27738.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.26768.0> [ns_server:info,2014-08-19T16:51:22.567,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:22.583,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{380,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:22.584,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
[ns_server:info,2014-08-19T16:51:22.585,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:22.585,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:22.585,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:22.585,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:22.585,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:22.585,ns_1@10.242.238.90:<0.27740.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:22.585,ns_1@10.242.238.90:<0.27740.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:22.586,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:22.586,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:22.586,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:22.586,ns_1@10.242.238.90:<0.26768.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:22.586,ns_1@10.242.238.90:<0.27738.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.26768.0> [ns_server:debug,2014-08-19T16:51:22.586,ns_1@10.242.238.90:<0.27738.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:22.586,ns_1@10.242.238.90:<0.27742.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:22.587,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.26768.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.26769.0>,<<"cut off">>,<<"cut off">>,[],133,false,false,0, {1408,452682,585006}, completed, {<0.27738.0>,#Ref<0.0.1.51351>}, <<"replication_ns_1@10.242.238.90">>,<0.26768.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:22.587,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.27738.0>,{#Ref<0.0.1.51340>,<0.27742.0>}} [rebalance:debug,2014-08-19T16:51:22.587,ns_1@10.242.238.90:<0.27743.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 379 [rebalance:debug,2014-08-19T16:51:22.587,ns_1@10.242.238.90:<0.27744.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 620 [error_logger:info,2014-08-19T16:51:22.587,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.27742.0>}, {name, {new_child_id, [380,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [380,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:51:22.588,ns_1@10.242.238.90:<0.27744.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.588,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27744.0> (ok) [rebalance:debug,2014-08-19T16:51:22.589,ns_1@10.242.238.90:<0.27743.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.589,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27743.0> (ok) [rebalance:debug,2014-08-19T16:51:22.593,ns_1@10.242.238.90:<0.26823.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:22.593,ns_1@10.242.238.90:<0.26823.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:22.593,ns_1@10.242.238.90:<0.27749.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:22.593,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:22.593,ns_1@10.242.238.90:<0.27749.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:22.593,ns_1@10.242.238.90:<0.26823.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:22.597,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4242 us [ns_server:debug,2014-08-19T16:51:22.598,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.599,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.599,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{380, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:22.599,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[380,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:22.599,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27751.0> [ns_server:info,2014-08-19T16:51:22.604,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 382 state to replica [ns_server:info,2014-08-19T16:51:22.604,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [380,382,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426] ([382], []) [ns_server:debug,2014-08-19T16:51:22.605,ns_1@10.242.238.90:<0.27752.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [380,382,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425, 426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.51520>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[380,382,384,385,386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:22.606,ns_1@10.242.238.90:<0.27752.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.27742.0> [ns_server:info,2014-08-19T16:51:22.606,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:22.617,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream 
`replication_ns_1@10.242.238.90`: [{380,1}, {382,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:22.618,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:22.618,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:22.618,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:22.619,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:22.619,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:22.619,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:22.619,ns_1@10.242.238.90:<0.27754.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:22.619,ns_1@10.242.238.90:<0.27754.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:22.619,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:22.619,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:22.619,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:22.620,ns_1@10.242.238.90:<0.27742.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:22.620,ns_1@10.242.238.90:<0.27752.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.27742.0> [ns_server:debug,2014-08-19T16:51:22.620,ns_1@10.242.238.90:<0.27752.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:22.620,ns_1@10.242.238.90:<0.27756.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:22.620,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.27742.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.27751.0>,<<"cut off">>,<<"cut off">>,[],136,false,false,0, {1408,452682,618818}, completed, {<0.27752.0>,#Ref<0.0.1.51533>}, <<"replication_ns_1@10.242.238.90">>,<0.27742.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:22.620,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.27752.0>,{#Ref<0.0.1.51522>,<0.27756.0>}} [error_logger:info,2014-08-19T16:51:22.620,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.27756.0>}, {name, {new_child_id, [380,382,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425, 426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [380,382,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:22.624,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:22.627,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.627,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2721 us [ns_server:debug,2014-08-19T16:51:22.627,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.628,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{382, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:22.632,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[380,382,384,385,386,387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:22.633,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27758.0> [rebalance:debug,2014-08-19T16:51:22.711,ns_1@10.242.238.90:<0.27759.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 622 [rebalance:debug,2014-08-19T16:51:22.712,ns_1@10.242.238.90:<0.27759.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.712,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27759.0> (ok) 
[rebalance:debug,2014-08-19T16:51:22.748,ns_1@10.242.238.90:<0.26848.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:22.748,ns_1@10.242.238.90:<0.26848.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:22.748,ns_1@10.242.238.90:<0.27763.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:22.749,ns_1@10.242.238.90:<0.27763.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:22.749,ns_1@10.242.238.90:<0.26848.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:22.753,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 381 state to replica [ns_server:info,2014-08-19T16:51:22.753,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [380,381,382,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426] ([381], []) [ns_server:debug,2014-08-19T16:51:22.754,ns_1@10.242.238.90:<0.27764.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [380,381,382,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424, 425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.51718>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[380,381,382,384,385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:22.755,ns_1@10.242.238.90:<0.27764.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.27756.0> [ns_server:info,2014-08-19T16:51:22.755,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:22.771,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{380,1}, {381,1}, {382,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:22.772,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:22.773,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:51:22.773,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:22.773,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:22.773,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:22.773,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:22.773,ns_1@10.242.238.90:<0.27766.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:22.773,ns_1@10.242.238.90:<0.27766.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:22.774,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:22.774,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:22.774,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:22.774,ns_1@10.242.238.90:<0.27756.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:22.774,ns_1@10.242.238.90:<0.27764.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.27756.0> [ns_server:debug,2014-08-19T16:51:22.774,ns_1@10.242.238.90:<0.27764.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:22.774,ns_1@10.242.238.90:<0.27768.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:22.774,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.27756.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.27758.0>,<<"cut off">>,<<"cut off">>,[],139,false,false,0, {1408,452682,773125}, completed, {<0.27764.0>,#Ref<0.0.1.51731>}, <<"replication_ns_1@10.242.238.90">>,<0.27756.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:22.775,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.27764.0>,{#Ref<0.0.1.51720>,<0.27768.0>}} [error_logger:info,2014-08-19T16:51:22.775,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.27768.0>}, {name, {new_child_id, [380,381,382,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424, 425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [380,381,382,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:22.780,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:22.787,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[380,381,382,384,385,386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:22.787,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27769.0> [ns_server:debug,2014-08-19T16:51:22.790,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.790,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 10576 us [ns_server:debug,2014-08-19T16:51:22.791,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.791,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{381, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:22.800,ns_1@10.242.238.90:<0.27771.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 624 [rebalance:debug,2014-08-19T16:51:22.802,ns_1@10.242.238.90:<0.27771.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:22.802,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27771.0> (ok) 
[rebalance:debug,2014-08-19T16:51:22.836,ns_1@10.242.238.90:<0.26798.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:22.836,ns_1@10.242.238.90:<0.26798.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:22.836,ns_1@10.242.238.90:<0.27774.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:22.836,ns_1@10.242.238.90:<0.27774.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:22.837,ns_1@10.242.238.90:<0.26798.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:22.840,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 383 state to replica [ns_server:info,2014-08-19T16:51:22.840,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426] ([383], []) [ns_server:debug,2014-08-19T16:51:22.842,ns_1@10.242.238.90:<0.27775.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423, 424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.51887>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[380,381,382,383,384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420,421,422,423,424, 425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:22.842,ns_1@10.242.238.90:<0.27775.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.27768.0> [ns_server:info,2014-08-19T16:51:22.842,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:22.862,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:22.863,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:22.864,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:51:22.864,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:22.864,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:22.864,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:22.864,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:22.864,ns_1@10.242.238.90:<0.27777.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:22.864,ns_1@10.242.238.90:<0.27777.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:22.864,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:22.864,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:22.865,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:22.865,ns_1@10.242.238.90:<0.27768.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:22.865,ns_1@10.242.238.90:<0.27775.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.27768.0> [ns_server:debug,2014-08-19T16:51:22.865,ns_1@10.242.238.90:<0.27775.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:22.865,ns_1@10.242.238.90:<0.27779.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:22.865,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.27768.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.27769.0>,<<"cut off">>,<<"cut off">>,[],142,false,false,0, {1408,452682,864023}, completed, {<0.27775.0>,#Ref<0.0.1.51902>}, <<"replication_ns_1@10.242.238.90">>,<0.27768.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:22.866,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.27775.0>,{#Ref<0.0.1.51889>,<0.27779.0>}} [error_logger:info,2014-08-19T16:51:22.866,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.27779.0>}, {name, {new_child_id, [380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423, 424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:22.875,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:22.876,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.876,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 695 us [ns_server:debug,2014-08-19T16:51:22.877,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.877,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{383, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:22.879,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:22.880,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27781.0> [rebalance:debug,2014-08-19T16:51:23.019,ns_1@10.242.238.90:<0.27782.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 626 [rebalance:debug,2014-08-19T16:51:23.019,ns_1@10.242.238.90:<0.27783.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 619 [rebalance:debug,2014-08-19T16:51:23.020,ns_1@10.242.238.90:<0.27782.0>:janitor_agent:handle_call:795]Done 
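The progress report above describes a dynamically added child with restart_type temporary, a 60000 ms shutdown and child_type worker. For reference, an old-style supervisor child spec carrying those same fields would look roughly like the sketch below; the helper module name and the empty option list are placeholders, and only the ebucketmigrator_srv MFA and the three fields come from the report itself.

%% child_spec_sketch.erl -- builds an illustrative child spec; placeholder args.
-module(child_spec_sketch).
-export([spec/4]).

%% Id mirrors the {new_child_id, VBuckets, SrcNode} ids seen in the log;
%% Src and Dst are {Host, Port} endpoints supplied by the caller.
spec(VBuckets, SrcNode, Src, Dst) ->
    {{new_child_id, VBuckets, SrcNode},                  %% child id
     {ebucketmigrator_srv, start_link, [Src, Dst, []]},  %% MFA (options elided)
     temporary,                                          %% restart_type
     60000,                                              %% shutdown, ms
     worker,                                             %% child_type
     [ebucketmigrator_srv]}.                             %% callback modules

A spec of this shape would typically be handed to supervisor:start_child/2 on the owning supervisor.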
[ns_server:debug,2014-08-19T16:51:23.020,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27782.0> (ok) [rebalance:debug,2014-08-19T16:51:23.020,ns_1@10.242.238.90:<0.27783.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:23.020,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27783.0> (ok) [rebalance:debug,2014-08-19T16:51:23.168,ns_1@10.242.238.90:<0.27788.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 621 [rebalance:debug,2014-08-19T16:51:23.169,ns_1@10.242.238.90:<0.27791.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 628 [rebalance:debug,2014-08-19T16:51:23.170,ns_1@10.242.238.90:<0.27791.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:23.170,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27791.0> (ok) [rebalance:debug,2014-08-19T16:51:23.170,ns_1@10.242.238.90:<0.27788.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:23.170,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27788.0> (ok) [rebalance:debug,2014-08-19T16:51:23.319,ns_1@10.242.238.90:<0.27794.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 630 [rebalance:debug,2014-08-19T16:51:23.319,ns_1@10.242.238.90:<0.27797.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 623 [rebalance:debug,2014-08-19T16:51:23.320,ns_1@10.242.238.90:<0.27794.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:23.320,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27794.0> (ok) [rebalance:debug,2014-08-19T16:51:23.320,ns_1@10.242.238.90:<0.27797.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:23.321,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27797.0> (ok) [rebalance:debug,2014-08-19T16:51:23.461,ns_1@10.242.238.90:<0.27816.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 632 [rebalance:debug,2014-08-19T16:51:23.461,ns_1@10.242.238.90:<0.27819.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 625 [rebalance:debug,2014-08-19T16:51:23.462,ns_1@10.242.238.90:<0.27816.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:23.462,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27816.0> (ok) [rebalance:debug,2014-08-19T16:51:23.463,ns_1@10.242.238.90:<0.27819.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:23.463,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27819.0> (ok) [ns_server:debug,2014-08-19T16:51:23.617,ns_1@10.242.238.90:<0.27823.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 634) [ns_server:debug,2014-08-19T16:51:23.617,ns_1@10.242.238.90:<0.27823.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:23.617,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message 
from subprocess: <0.27822.0> (ok) [rebalance:debug,2014-08-19T16:51:23.617,ns_1@10.242.238.90:<0.26917.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:23.618,ns_1@10.242.238.90:<0.26917.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:23.618,ns_1@10.242.238.90:<0.27824.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:23.618,ns_1@10.242.238.90:<0.27824.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:23.618,ns_1@10.242.238.90:<0.26917.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:23.619,ns_1@10.242.238.90:<0.27825.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 627 [rebalance:debug,2014-08-19T16:51:23.620,ns_1@10.242.238.90:<0.27825.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:23.620,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27825.0> (ok) [ns_server:info,2014-08-19T16:51:23.655,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 634 state to active [ns_server:debug,2014-08-19T16:51:23.690,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:23.692,ns_1@10.242.238.90:<0.27829.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 636) [ns_server:debug,2014-08-19T16:51:23.692,ns_1@10.242.238.90:<0.27829.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:23.692,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27828.0> (ok) [ns_server:debug,2014-08-19T16:51:23.694,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2080 us [rebalance:debug,2014-08-19T16:51:23.694,ns_1@10.242.238.90:<0.26853.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:23.695,ns_1@10.242.238.90:<0.27831.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 629 [ns_server:debug,2014-08-19T16:51:23.695,ns_1@10.242.238.90:<0.26853.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:23.695,ns_1@10.242.238.90:<0.27832.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:23.696,ns_1@10.242.238.90:<0.27832.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:23.696,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:23.696,ns_1@10.242.238.90:<0.26853.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
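The janitor_agent entries above repeat a simple parent/subprocess exchange: a short-lived worker is spawned, performs one blocking wait, and reports back with a done message that the parent logs. A generic sketch of that round trip is below; the names are illustrative and this is not the janitor_agent implementation.

%% subprocess_sketch.erl -- generic done-message round trip; illustrative only.
-module(subprocess_sketch).
-export([run/1]).

%% Fun is any zero-arity fun; its result is relayed back to the parent.
run(Fun) ->
    Parent = self(),
    Pid = spawn_link(fun() -> Parent ! {done, self(), Fun()} end),
    receive
        {done, Pid, Result} ->
            io:format("Got done message from subprocess: ~p (~p)~n",
                      [Pid, Result]),
            Result
    end.

For example, subprocess_sketch:run(fun() -> ok end) prints one such line and returns ok.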
[ns_server:debug,2014-08-19T16:51:23.697,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:23.698,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{634, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:23.703,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/634. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:23.703,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",634,active,1} [rebalance:debug,2014-08-19T16:51:23.704,ns_1@10.242.238.90:<0.27831.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:23.704,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27831.0> (ok) [ns_server:info,2014-08-19T16:51:23.732,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 636 state to active [views:debug,2014-08-19T16:51:23.762,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/636. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:23.763,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",636,active,1} [ns_server:debug,2014-08-19T16:51:23.765,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:23.768,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:23.770,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{636, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:23.770,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5260 us [ns_server:debug,2014-08-19T16:51:23.770,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:23.776,ns_1@10.242.238.90:<0.27837.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 638) 
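The mc_couch_event tuples logged here have the shape {set_vbucket, Bucket, VBucket, State, N}; the trailing integer is the number the views logger prints in parentheses ("active (1)"), and its exact meaning is not spelled out in this capture. A small pattern-matching helper for such tuples, illustrative only:

%% mc_event_sketch.erl -- formats the set_vbucket event tuples seen above.
-module(mc_event_sketch).
-export([describe/1]).

describe({set_vbucket, Bucket, VBucket, State, N}) ->
    lists:flatten(io_lib:format("bucket ~s: vbucket ~b -> ~p (~b)",
                                [Bucket, VBucket, State, N]));
describe(_Other) ->
    "unrecognized event".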
[ns_server:debug,2014-08-19T16:51:23.776,ns_1@10.242.238.90:<0.27837.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:23.776,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27836.0> (ok) [rebalance:debug,2014-08-19T16:51:23.776,ns_1@10.242.238.90:<0.26817.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:23.777,ns_1@10.242.238.90:<0.26817.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:23.777,ns_1@10.242.238.90:<0.27838.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:23.777,ns_1@10.242.238.90:<0.27838.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:23.777,ns_1@10.242.238.90:<0.26817.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:23.779,ns_1@10.242.238.90:<0.27839.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 631 [rebalance:debug,2014-08-19T16:51:23.781,ns_1@10.242.238.90:<0.27839.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:23.781,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27839.0> (ok) [ns_server:info,2014-08-19T16:51:23.810,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 638 state to active [views:debug,2014-08-19T16:51:23.838,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/638. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:23.838,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",638,active,1} [ns_server:debug,2014-08-19T16:51:23.838,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:23.842,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:23.842,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3228 us [ns_server:debug,2014-08-19T16:51:23.843,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{638, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:23.843,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:23.862,ns_1@10.242.238.90:<0.27843.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 633 [rebalance:debug,2014-08-19T16:51:23.864,ns_1@10.242.238.90:<0.27843.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:23.864,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27843.0> (ok) [ns_server:debug,2014-08-19T16:51:23.984,ns_1@10.242.238.90:<0.27847.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 635) [ns_server:debug,2014-08-19T16:51:23.984,ns_1@10.242.238.90:<0.27847.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:23.984,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27846.0> (ok) [rebalance:debug,2014-08-19T16:51:23.985,ns_1@10.242.238.90:<0.26878.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:23.985,ns_1@10.242.238.90:<0.26878.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:23.985,ns_1@10.242.238.90:<0.27848.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:23.985,ns_1@10.242.238.90:<0.27848.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:23.985,ns_1@10.242.238.90:<0.26878.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:24.017,ns_1@10.242.238.90:<0.27850.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 637) [ns_server:debug,2014-08-19T16:51:24.017,ns_1@10.242.238.90:<0.27850.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:24.017,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27849.0> (ok) [rebalance:debug,2014-08-19T16:51:24.018,ns_1@10.242.238.90:<0.26831.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.018,ns_1@10.242.238.90:<0.26831.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.018,ns_1@10.242.238.90:<0.27851.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.018,ns_1@10.242.238.90:<0.27851.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.018,ns_1@10.242.238.90:<0.26831.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:24.021,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 635 state to active [ns_server:debug,2014-08-19T16:51:24.048,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.050,ns_1@10.242.238.90:<0.27853.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 639) [ns_server:debug,2014-08-19T16:51:24.051,ns_1@10.242.238.90:<0.27853.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:24.051,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27852.0> (ok) [ns_server:debug,2014-08-19T16:51:24.051,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.051,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3422 us [ns_server:debug,2014-08-19T16:51:24.052,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.052,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{635, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:24.053,ns_1@10.242.238.90:<0.26778.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.053,ns_1@10.242.238.90:<0.26778.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack 
[ns_server:debug,2014-08-19T16:51:24.053,ns_1@10.242.238.90:<0.27855.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.053,ns_1@10.242.238.90:<0.27855.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.053,ns_1@10.242.238.90:<0.26778.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:24.054,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 637 state to active [views:debug,2014-08-19T16:51:24.074,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/635. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:24.074,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",635,active,1} [ns_server:debug,2014-08-19T16:51:24.083,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.090,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6791 us [ns_server:debug,2014-08-19T16:51:24.090,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.091,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:51:24.091,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 639 state to active [ns_server:debug,2014-08-19T16:51:24.091,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{637, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:24.121,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.124,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2962 us [ns_server:debug,2014-08-19T16:51:24.124,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.125,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.125,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{639, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, 
{type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:24.140,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/637. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:24.140,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",637,active,1} [views:debug,2014-08-19T16:51:24.198,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/639. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:24.198,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",639,active,1} [ns_server:debug,2014-08-19T16:51:24.419,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.422,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.423,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2980 us [ns_server:debug,2014-08-19T16:51:24.423,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.424,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{890, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:24.453,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.455,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1451 us [ns_server:debug,2014-08-19T16:51:24.455,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.455,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.456,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{892, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
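Each config-change entry above prints the bucket settings as an Erlang proplist whose map field is a list of {VBucket, Chain, Chain} tuples. Reading individual fields out of such a term needs only the stdlib; the helper below is an illustrative sketch, not part of ns_config_log.

%% bucket_cfg_sketch.erl -- field access on a logged bucket-config proplist.
-module(bucket_cfg_sketch).
-export([servers/1, ram_quota/1, map_entry/2]).

servers(BucketCfg)   -> proplists:get_value(servers, BucketCfg, []).
ram_quota(BucketCfg) -> proplists:get_value(ram_quota, BucketCfg).

%% Returns the {VBucket, ..., ...} map tuple for one vbucket, or false.
map_entry(VBucket, BucketCfg) ->
    Map = proplists:get_value(map, BucketCfg, []),
    lists:keyfind(VBucket, 1, Map).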
[ns_server:debug,2014-08-19T16:51:24.492,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.495,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.495,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3258 us [ns_server:debug,2014-08-19T16:51:24.496,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.496,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{894, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:24.520,ns_1@10.242.238.90:<0.27863.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 620) [ns_server:debug,2014-08-19T16:51:24.520,ns_1@10.242.238.90:<0.27863.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:24.520,ns_1@10.242.238.90:<0.27865.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 622) [ns_server:debug,2014-08-19T16:51:24.520,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27862.0> (ok) [ns_server:debug,2014-08-19T16:51:24.520,ns_1@10.242.238.90:<0.27865.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:24.520,ns_1@10.242.238.90:<0.27868.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 624) [ns_server:debug,2014-08-19T16:51:24.520,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27864.0> (ok) [ns_server:debug,2014-08-19T16:51:24.520,ns_1@10.242.238.90:<0.27868.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:24.520,ns_1@10.242.238.90:<0.27869.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 626) [ns_server:debug,2014-08-19T16:51:24.520,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27866.0> (ok) [ns_server:debug,2014-08-19T16:51:24.520,ns_1@10.242.238.90:<0.27869.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27871.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 628) [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27871.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27867.0> (ok) 
[ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27870.0> (ok) [rebalance:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27275.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27873.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 630) [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27873.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27210.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27159.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27875.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 632) [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27872.0> (ok) [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27875.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27275.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27878.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 619) [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27210.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27159.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27876.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27879.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27880.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27073.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27878.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:51:24.521,ns_1@10.242.238.90:<0.27123.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27882.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 621) [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27876.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27874.0> (ok) [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27879.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was 
succesfully sent [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27880.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27073.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27877.0> (ok) [rebalance:info,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27275.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27882.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27123.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27210.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27885.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27886.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 623) [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27883.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27003.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27881.0> (ok) [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27885.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27886.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:info,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27159.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27888.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 625) [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27883.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27123.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27888.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:24.522,ns_1@10.242.238.90:<0.27003.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27884.0> (ok) [rebalance:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.26953.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27889.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27892.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 631) [rebalance:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27300.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27889.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27073.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.26953.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27892.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:info,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27003.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27894.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27895.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 627) [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27887.0> (ok) [rebalance:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27235.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27894.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27300.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27891.0> (ok) [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27895.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27897.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 629) [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27896.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.26953.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27235.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27898.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27897.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27184.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27890.0> (ok) [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27896.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27898.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:24.523,ns_1@10.242.238.90:<0.27900.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 633) [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27893.0> (ok) [rebalance:info,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27300.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27184.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27901.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27900.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27148.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27235.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27901.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.26978.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.27899.0> (ok) [rebalance:info,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27184.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.26978.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27098.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27148.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27903.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27902.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27902.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27903.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27034.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27098.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27904.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.26942.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27904.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.26978.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27905.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27148.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27034.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:51:24.524,ns_1@10.242.238.90:<0.27098.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.525,ns_1@10.242.238.90:<0.27905.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:24.525,ns_1@10.242.238.90:<0.26942.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.525,ns_1@10.242.238.90:<0.27906.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:24.525,ns_1@10.242.238.90:<0.27034.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.525,ns_1@10.242.238.90:<0.27906.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.525,ns_1@10.242.238.90:<0.26942.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:51:24.676,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 626 state to active [ns_server:info,2014-08-19T16:51:24.690,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 624 state to active [ns_server:info,2014-08-19T16:51:24.691,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 632 state to active [ns_server:info,2014-08-19T16:51:24.694,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 630 state to active [ns_server:debug,2014-08-19T16:51:24.703,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.706,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.707,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3276 us [ns_server:debug,2014-08-19T16:51:24.707,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.707,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{891, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:24.709,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 628 state to active [ns_server:info,2014-08-19T16:51:24.718,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 622 state to active [ns_server:info,2014-08-19T16:51:24.728,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 620 state to active [ns_server:debug,2014-08-19T16:51:24.738,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.745,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.745,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6928 us [ns_server:debug,2014-08-19T16:51:24.746,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.746,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{893, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, 
{map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:24.749,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/626. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:24.749,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",626,active,1} [rebalance:debug,2014-08-19T16:51:24.776,ns_1@10.242.238.90:<0.27221.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.776,ns_1@10.242.238.90:<0.27221.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.776,ns_1@10.242.238.90:<0.27909.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.776,ns_1@10.242.238.90:<0.27909.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.777,ns_1@10.242.238.90:<0.27221.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:24.777,ns_1@10.242.238.90:<0.27104.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.777,ns_1@10.242.238.90:<0.27104.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.777,ns_1@10.242.238.90:<0.27910.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.777,ns_1@10.242.238.90:<0.27910.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.777,ns_1@10.242.238.90:<0.27104.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.778,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.783,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.783,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4730 us [ns_server:debug,2014-08-19T16:51:24.783,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.784,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{895, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:24.787,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 623 state to active [views:debug,2014-08-19T16:51:24.799,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/632. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:24.799,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",632,active,1} [rebalance:debug,2014-08-19T16:51:24.804,ns_1@10.242.238.90:<0.26973.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.804,ns_1@10.242.238.90:<0.26973.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.804,ns_1@10.242.238.90:<0.27912.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.804,ns_1@10.242.238.90:<0.27912.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.804,ns_1@10.242.238.90:<0.26973.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.809,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.812,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2685 us [ns_server:debug,2014-08-19T16:51:24.812,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.812,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.813,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{626, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:24.834,ns_1@10.242.238.90:<0.27079.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.834,ns_1@10.242.238.90:<0.27079.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.834,ns_1@10.242.238.90:<0.27914.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.834,ns_1@10.242.238.90:<0.27914.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.834,ns_1@10.242.238.90:<0.27079.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:24.840,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.841,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.841,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1412 us [ns_server:debug,2014-08-19T16:51:24.842,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.843,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{624, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:24.846,ns_1@10.242.238.90:<0.27190.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.846,ns_1@10.242.238.90:<0.27190.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.846,ns_1@10.242.238.90:<0.27915.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.846,ns_1@10.242.238.90:<0.27915.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.847,ns_1@10.242.238.90:<0.27190.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:51:24.849,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/630. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:24.849,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",630,active,1} [rebalance:debug,2014-08-19T16:51:24.864,ns_1@10.242.238.90:<0.27306.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.865,ns_1@10.242.238.90:<0.27306.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.865,ns_1@10.242.238.90:<0.27917.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.865,ns_1@10.242.238.90:<0.27917.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.865,ns_1@10.242.238.90:<0.27306.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:24.869,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.873,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3999 us [ns_server:debug,2014-08-19T16:51:24.873,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.874,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.874,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{632, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:24.900,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/628. Updated state: active (1) [ns_server:info,2014-08-19T16:51:24.901,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 633 state to active [ns_server:debug,2014-08-19T16:51:24.901,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",628,active,1} [ns_server:debug,2014-08-19T16:51:24.904,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.907,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.907,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2923 us [ns_server:debug,2014-08-19T16:51:24.908,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.908,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{630, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:24.933,ns_1@10.242.238.90:<0.26948.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.934,ns_1@10.242.238.90:<0.26948.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.934,ns_1@10.242.238.90:<0.27920.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque 
message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.934,ns_1@10.242.238.90:<0.27920.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.934,ns_1@10.242.238.90:<0.26948.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.939,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:24.944,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4701 us [ns_server:debug,2014-08-19T16:51:24.944,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.944,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.945,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{874, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:24.967,ns_1@10.242.238.90:<0.27023.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.967,ns_1@10.242.238.90:<0.27023.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.967,ns_1@10.242.238.90:<0.27922.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.967,ns_1@10.242.238.90:<0.27922.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.968,ns_1@10.242.238.90:<0.27023.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:24.972,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:51:24.972,ns_1@10.242.238.90:<0.27129.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.973,ns_1@10.242.238.90:<0.27129.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:24.973,ns_1@10.242.238.90:<0.27923.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:24.973,ns_1@10.242.238.90:<0.27923.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:24.973,ns_1@10.242.238.90:<0.27129.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:24.974,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.974,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1243 us [ns_server:debug,2014-08-19T16:51:24.974,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:24.975,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/624. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:24.975,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",624,active,1} [ns_server:debug,2014-08-19T16:51:24.975,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{628, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:24.983,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 629 state to active [ns_server:debug,2014-08-19T16:51:25.004,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.007,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2696 us [ns_server:debug,2014-08-19T16:51:25.007,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.008,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.008,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{622, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:25.015,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 621 state to active [views:debug,2014-08-19T16:51:25.033,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/622. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:25.034,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",622,active,1} [ns_server:debug,2014-08-19T16:51:25.034,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.037,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.037,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2569 us [ns_server:debug,2014-08-19T16:51:25.038,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{620, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.038,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:25.060,ns_1@10.242.238.90:<0.26898.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.060,ns_1@10.242.238.90:<0.26898.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.060,ns_1@10.242.238.90:<0.27927.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.060,ns_1@10.242.238.90:<0.27927.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.061,ns_1@10.242.238.90:<0.26898.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:25.061,ns_1@10.242.238.90:<0.27281.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.062,ns_1@10.242.238.90:<0.27281.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.062,ns_1@10.242.238.90:<0.27928.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.062,ns_1@10.242.238.90:<0.27928.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.062,ns_1@10.242.238.90:<0.27281.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:25.064,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.066,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1402 us [ns_server:debug,2014-08-19T16:51:25.066,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.066,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.067,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{879, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:25.072,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 627 state to active [views:debug,2014-08-19T16:51:25.095,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/620. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:25.095,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",620,active,1} [rebalance:debug,2014-08-19T16:51:25.098,ns_1@10.242.238.90:<0.27154.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.098,ns_1@10.242.238.90:<0.27154.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.098,ns_1@10.242.238.90:<0.27929.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.098,ns_1@10.242.238.90:<0.27929.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.098,ns_1@10.242.238.90:<0.27154.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:25.099,ns_1@10.242.238.90:<0.26923.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.099,ns_1@10.242.238.90:<0.26923.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.099,ns_1@10.242.238.90:<0.27930.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.099,ns_1@10.242.238.90:<0.27930.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.099,ns_1@10.242.238.90:<0.26923.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:25.103,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.105,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.107,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{876, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.107,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4326 us [ns_server:debug,2014-08-19T16:51:25.107,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:25.108,ns_1@10.242.238.90:<0.27165.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.108,ns_1@10.242.238.90:<0.27165.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.108,ns_1@10.242.238.90:<0.27932.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.108,ns_1@10.242.238.90:<0.27932.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.108,ns_1@10.242.238.90:<0.27165.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:51:25.109,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 366 state to replica [ns_server:info,2014-08-19T16:51:25.109,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [366,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426] ([366], []) [ns_server:debug,2014-08-19T16:51:25.110,ns_1@10.242.238.90:<0.27933.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [366,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.54762>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[366,380,381,382,383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:25.111,ns_1@10.242.238.90:<0.27933.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.27779.0> [ns_server:info,2014-08-19T16:51:25.111,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:25.129,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{366,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:25.129,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:25.130,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:25.130,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:51:25.130,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:25.130,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:25.130,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.130,ns_1@10.242.238.90:<0.27935.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.130,ns_1@10.242.238.90:<0.27935.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.130,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:25.131,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:25.131,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:25.131,ns_1@10.242.238.90:<0.27779.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:25.131,ns_1@10.242.238.90:<0.27933.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.27779.0> [ns_server:debug,2014-08-19T16:51:25.131,ns_1@10.242.238.90:<0.27933.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:25.131,ns_1@10.242.238.90:<0.27937.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:25.131,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.27779.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.27781.0>,<<"cut off">>,<<"cut off">>,[],145,false,false,0, {1408,452685,130019}, completed, {<0.27933.0>,#Ref<0.0.1.54775>}, <<"replication_ns_1@10.242.238.90">>,<0.27779.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:25.132,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.27933.0>,{#Ref<0.0.1.54764>,<0.27937.0>}} [error_logger:info,2014-08-19T16:51:25.132,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.27937.0>}, {name, {new_child_id, [366,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [366,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:51:25.132,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 631 state to active [ns_server:info,2014-08-19T16:51:25.134,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 619 state to active [ns_server:debug,2014-08-19T16:51:25.137,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:51:25.143,ns_1@10.242.238.90:<0.27054.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.144,ns_1@10.242.238.90:<0.27054.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.144,ns_1@10.242.238.90:<0.27938.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.144,ns_1@10.242.238.90:<0.27938.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:25.144,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7682 us [ns_server:debug,2014-08-19T16:51:25.145,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:25.145,ns_1@10.242.238.90:<0.27054.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:25.145,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.146,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{366, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:25.147,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 371 state to replica [ns_server:info,2014-08-19T16:51:25.148,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [366,371,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426] ([371], []) [ns_server:debug,2014-08-19T16:51:25.148,ns_1@10.242.238.90:<0.27940.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [366,371,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.54928>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[366,371,380,381,382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:25.149,ns_1@10.242.238.90:<0.27940.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.27937.0> [ns_server:debug,2014-08-19T16:51:25.150,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[366,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:25.150,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27942.0> [views:debug,2014-08-19T16:51:25.150,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/633. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:25.151,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",633,active,1} [ns_server:info,2014-08-19T16:51:25.151,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:51:25.154,ns_1@10.242.238.90:<0.27255.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.155,ns_1@10.242.238.90:<0.27255.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.155,ns_1@10.242.238.90:<0.27943.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.155,ns_1@10.242.238.90:<0.27943.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.155,ns_1@10.242.238.90:<0.27255.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:25.167,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{366,1}, {371,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:25.168,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:25.169,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:25.169,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:25.169,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:25.169,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:25.169,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.169,ns_1@10.242.238.90:<0.27944.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.169,ns_1@10.242.238.90:<0.27944.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.170,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:25.170,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:25.170,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:25.170,ns_1@10.242.238.90:<0.27937.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:25.170,ns_1@10.242.238.90:<0.27940.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.27937.0> [ns_server:debug,2014-08-19T16:51:25.170,ns_1@10.242.238.90:<0.27940.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:25.171,ns_1@10.242.238.90:<0.27946.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:25.171,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.27937.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.27942.0>,<<"cut off">>,<<"cut off">>,[],148,false,false,0, {1408,452685,169325}, completed, {<0.27940.0>,#Ref<0.0.1.54941>}, <<"replication_ns_1@10.242.238.90">>,<0.27937.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:25.171,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.27940.0>,{#Ref<0.0.1.54930>,<0.27946.0>}} [error_logger:info,2014-08-19T16:51:25.171,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.27946.0>}, {name, {new_child_id, [366,371,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [366,371,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:51:25.174,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 625 state to active [ns_server:debug,2014-08-19T16:51:25.180,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.182,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.182,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2019 us [ns_server:debug,2014-08-19T16:51:25.183,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:25.183,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{371, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.184,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[366,371,380,381,382,383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:25.184,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27948.0> [rebalance:debug,2014-08-19T16:51:25.186,ns_1@10.242.238.90:<0.26998.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.186,ns_1@10.242.238.90:<0.26998.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.186,ns_1@10.242.238.90:<0.27949.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.186,ns_1@10.242.238.90:<0.27949.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.186,ns_1@10.242.238.90:<0.26998.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:51:25.209,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/629. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:25.209,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",629,active,1} [ns_server:debug,2014-08-19T16:51:25.214,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.217,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.217,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3156 us [ns_server:debug,2014-08-19T16:51:25.218,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.218,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{623, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:25.220,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 376 state to replica [ns_server:info,2014-08-19T16:51:25.221,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [366,371,376,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425,426] ([376], []) [ns_server:debug,2014-08-19T16:51:25.222,ns_1@10.242.238.90:<0.27951.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [366,371,376,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.55192>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[366,371,376,380,381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:25.222,ns_1@10.242.238.90:<0.27951.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.27946.0> [ns_server:info,2014-08-19T16:51:25.222,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:25.235,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{366,1}, {371,1}, {376,1}, {380,1}, {381,1}, 
{382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:25.237,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:25.237,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:25.237,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:25.237,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:25.237,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:25.237,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.237,ns_1@10.242.238.90:<0.27953.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.238,ns_1@10.242.238.90:<0.27953.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.238,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:25.238,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:25.238,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:25.238,ns_1@10.242.238.90:<0.27946.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:25.238,ns_1@10.242.238.90:<0.27951.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.27946.0> [ns_server:debug,2014-08-19T16:51:25.239,ns_1@10.242.238.90:<0.27951.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:25.239,ns_1@10.242.238.90:<0.27955.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:25.239,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.27946.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.27948.0>,<<"cut off">>,<<"cut off">>,[],151,false,false,0, {1408,452685,237350}, completed, {<0.27951.0>,#Ref<0.0.1.55205>}, <<"replication_ns_1@10.242.238.90">>,<0.27946.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:25.239,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.27951.0>,{#Ref<0.0.1.55194>,<0.27955.0>}} [error_logger:info,2014-08-19T16:51:25.239,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.27955.0>}, {name, {new_child_id, [366,371,376,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [366,371,376,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:25.244,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.248,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.248,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3257 us [ns_server:debug,2014-08-19T16:51:25.248,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.249,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{376, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.251,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[366,371,376,380,381,382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424, 425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:25.252,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27957.0> [views:debug,2014-08-19T16:51:25.267,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/627. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:25.267,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",627,active,1} [ns_server:debug,2014-08-19T16:51:25.273,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.276,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.276,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3247 us [ns_server:debug,2014-08-19T16:51:25.276,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.277,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{877, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:25.283,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 372 state to replica [ns_server:info,2014-08-19T16:51:25.283,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [366,371,372,376,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424,425,426] ([372], []) [ns_server:debug,2014-08-19T16:51:25.284,ns_1@10.242.238.90:<0.27959.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [366,371,372,376,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.55383>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[366,371,372,376,380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:25.284,ns_1@10.242.238.90:<0.27959.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.27955.0> [ns_server:info,2014-08-19T16:51:25.285,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:25.298,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{366,1}, {371,1}, {372,1}, 
{376,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:25.299,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:25.299,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:25.299,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:25.299,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:25.299,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:25.299,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.299,ns_1@10.242.238.90:<0.27961.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.300,ns_1@10.242.238.90:<0.27961.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.301,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:25.301,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:25.301,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:25.301,ns_1@10.242.238.90:<0.27955.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:25.301,ns_1@10.242.238.90:<0.27959.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.27955.0> [ns_server:debug,2014-08-19T16:51:25.302,ns_1@10.242.238.90:<0.27959.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:25.302,ns_1@10.242.238.90:<0.27963.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:25.302,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.27955.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.27957.0>,<<"cut off">>,<<"cut off">>,[],154,false,false,0, {1408,452685,299271}, completed, {<0.27959.0>,#Ref<0.0.1.55396>}, <<"replication_ns_1@10.242.238.90">>,<0.27955.0>, {had_backfill,false,undefined,[]}, completed,false}. 
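Each "Going to change replication from ... to have [...] ([372], [])" entry reports the added and removed vbuckets that produce the next filter. Assuming the current filter and both deltas are plain sorted integer lists, the next filter can be derived with ordsets; the function name below is illustrative.

    -module(next_filter_sketch).
    -export([next_filter/3]).

    %% Current filter plus the (Added, Removed) deltas logged by
    %% tap_replication_manager:change_vbucket_filter.
    next_filter(Current, Added, Removed) ->
        ordsets:subtract(
          ordsets:union(ordsets:from_list(Current), ordsets:from_list(Added)),
          ordsets:from_list(Removed)).

    %% Matching the entry above: adding vbucket 372 and removing nothing
    %% turns [366,371,376|Rest] into [366,371,372,376|Rest].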
[ns_server:debug,2014-08-19T16:51:25.302,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.27959.0>,{#Ref<0.0.1.55385>,<0.27963.0>}} [error_logger:info,2014-08-19T16:51:25.302,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.27963.0>}, {name, {new_child_id, [366,371,372,376,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [366,371,372,376,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:25.308,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.311,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.311,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3087 us [ns_server:debug,2014-08-19T16:51:25.312,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.312,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{372, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.317,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[366,371,372,376,380,381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:25.318,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27965.0> [views:debug,2014-08-19T16:51:25.335,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/623. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:25.336,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",623,active,1} [ns_server:debug,2014-08-19T16:51:25.341,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.347,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.347,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6459 us [ns_server:debug,2014-08-19T16:51:25.348,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.350,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{885, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:25.350,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 367 state to replica [ns_server:info,2014-08-19T16:51:25.350,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [366,367,371,372,376,380,381,382,383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([367], []) [ns_server:debug,2014-08-19T16:51:25.354,ns_1@10.242.238.90:<0.27967.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [366,367,371,372,376,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.55571>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[366,367,371,372,376,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:25.355,ns_1@10.242.238.90:<0.27967.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.27963.0> [ns_server:info,2014-08-19T16:51:25.355,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:25.367,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{366,1}, {367,1}, 
{371,1}, {372,1}, {376,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:25.367,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:25.370,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:25.370,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:25.371,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:25.371,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:25.371,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.371,ns_1@10.242.238.90:<0.27975.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.371,ns_1@10.242.238.90:<0.27975.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.371,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:25.371,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:25.372,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:25.372,ns_1@10.242.238.90:<0.27963.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:25.372,ns_1@10.242.238.90:<0.27967.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.27963.0> [ns_server:debug,2014-08-19T16:51:25.372,ns_1@10.242.238.90:<0.27967.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:25.372,ns_1@10.242.238.90:<0.27977.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:25.373,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.27963.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.27965.0>,<<"cut off">>,<<"cut off">>,[],157,false,false,0, {1408,452685,370740}, completed, {<0.27967.0>,#Ref<0.0.1.55586>}, <<"replication_ns_1@10.242.238.90">>,<0.27963.0>, {had_backfill,false,undefined,[]}, completed,false}. 
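The "config change: buckets" entries in this section each carry a one-element map delta of the form {VBucket, OldChain, NewChain}, where a chain lists the active node first and then the replicas, with undefined marking an absent replica. A small decoder for that shape, under exactly that assumption:

    -module(map_delta_sketch).
    -export([describe/1]).

    %% Decode one element of a buckets map delta as seen in the
    %% "config change: buckets" entries. Chain layout is assumed to be
    %% [ActiveNode | Replicas], with undefined for an absent replica.
    describe({VBucket, [OldActive | _OldReplicas], [NewActive | NewReplicas]}) ->
        [{vbucket, VBucket},
         {old_active, OldActive},
         {new_active, NewActive},
         {new_replicas, [N || N <- NewReplicas, N =/= undefined]}].

    %% describe({372, ['ns_1@10.242.238.88', undefined],
    %%                ['ns_1@10.242.238.89', 'ns_1@10.242.238.90']})
    %% -> [{vbucket,372},
    %%     {old_active,'ns_1@10.242.238.88'},
    %%     {new_active,'ns_1@10.242.238.89'},
    %%     {new_replicas,['ns_1@10.242.238.90']}]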
[ns_server:debug,2014-08-19T16:51:25.373,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.27967.0>,{#Ref<0.0.1.55573>,<0.27977.0>}} [error_logger:info,2014-08-19T16:51:25.373,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.27977.0>}, {name, {new_child_id, [366,367,371,372,376,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [366,367,371,372,376,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424, 425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:25.378,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.382,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.382,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3525 us [ns_server:debug,2014-08-19T16:51:25.382,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.383,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{367, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:25.384,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 363 state to replica [ns_server:info,2014-08-19T16:51:25.384,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [363,366,367,371,372,376,380,381,382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([363], []) [ns_server:debug,2014-08-19T16:51:25.386,ns_1@10.242.238.90:<0.27978.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [363,366,367,371,372,376,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417, 
418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.55707>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[363,366,367,371,372,376,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:25.386,ns_1@10.242.238.90:<0.27978.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.27977.0> [ns_server:debug,2014-08-19T16:51:25.387,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[366,367,371,372,376,380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:25.387,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27980.0> [views:debug,2014-08-19T16:51:25.388,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/621. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:25.388,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",621,active,1} [ns_server:info,2014-08-19T16:51:25.388,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:25.400,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{363,1}, {366,1}, {367,1}, {371,1}, {372,1}, {376,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:25.401,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:25.401,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:25.402,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:51:25.402,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:25.402,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:25.402,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.402,ns_1@10.242.238.90:<0.27982.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.402,ns_1@10.242.238.90:<0.27982.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.402,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:25.402,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:25.402,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:25.402,ns_1@10.242.238.90:<0.27977.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:25.402,ns_1@10.242.238.90:<0.27978.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.27977.0> [ns_server:debug,2014-08-19T16:51:25.403,ns_1@10.242.238.90:<0.27978.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:25.403,ns_1@10.242.238.90:<0.27984.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:25.403,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.27977.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.27980.0>,<<"cut off">>,<<"cut off">>,[],160,false,false,0, {1408,452685,401866}, completed, {<0.27978.0>,#Ref<0.0.1.55720>}, <<"replication_ns_1@10.242.238.90">>,<0.27977.0>, {had_backfill,false,undefined,[]}, completed,false}. 
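The "Sending opaque message to confirm downstream reception" / "Got close ack!" pair marks the barrier step of each hand-off: a marker is pushed down the already-open connection and the worker blocks until it is acknowledged, which proves everything sent earlier has been consumed. The sketch below shows only the pattern, over a plain passive-mode TCP socket with a made-up line protocol; the real stream uses memcached/TAP framing, which is not reproduced here.

    -module(downstream_confirm_sketch).
    -export([confirm/2]).

    %% Illustrative barrier: send a unique marker and wait until the peer
    %% echoes it back. The wire format is hypothetical; only the shape
    %% (send marker, block for ack) mirrors confirm_sent_messages above.
    %% Socket is assumed to be in passive mode.
    confirm(Socket, Timeout) ->
        Marker = integer_to_list(erlang:phash2(make_ref())),
        ok = gen_tcp:send(Socket, ["OPAQUE ", Marker, "\r\n"]),
        case gen_tcp:recv(Socket, 0, Timeout) of
            {ok, Reply} ->
                Flat = binary_to_list(iolist_to_binary(Reply)),
                case string:str(Flat, Marker) of
                    0 -> {error, unexpected_reply};
                    _ -> ok
                end;
            {error, Reason} ->
                {error, Reason}
        end.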
[ns_server:debug,2014-08-19T16:51:25.404,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.27978.0>,{#Ref<0.0.1.55709>,<0.27984.0>}} [error_logger:info,2014-08-19T16:51:25.404,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.27984.0>}, {name, {new_child_id, [363,366,367,371,372,376,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [363,366,367,371,372,376,380,381,382,383, 384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:25.408,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.412,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2852 us [ns_server:debug,2014-08-19T16:51:25.412,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.412,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.413,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{363, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.416,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[363,366,367,371,372,376,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:25.416,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27985.0> [ns_server:debug,2014-08-19T16:51:25.438,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:51:25.438,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/631. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:25.438,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",631,active,1} [ns_server:debug,2014-08-19T16:51:25.443,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.443,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5061 us [ns_server:debug,2014-08-19T16:51:25.443,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.444,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{878, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.470,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.473,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3004 us [ns_server:debug,2014-08-19T16:51:25.473,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.474,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.474,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{888, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:25.488,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/625. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:25.488,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",625,active,1} [ns_server:debug,2014-08-19T16:51:25.506,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.509,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.509,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2733 us [ns_server:debug,2014-08-19T16:51:25.509,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.510,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{884, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.536,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.541,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4969 us [ns_server:debug,2014-08-19T16:51:25.541,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.542,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.542,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{633, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:25.548,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/619. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:25.548,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",619,active,1} [ns_server:debug,2014-08-19T16:51:25.586,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.589,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.590,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3648 us [ns_server:debug,2014-08-19T16:51:25.590,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.591,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{889, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:25.593,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 377 state to replica [ns_server:info,2014-08-19T16:51:25.594,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [363,366,367,371,372,376,377,380,381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([377], []) [ns_server:debug,2014-08-19T16:51:25.596,ns_1@10.242.238.90:<0.27992.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [363,366,367,371,372,376,377,380,381,382,383, 384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.56089>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[363,366,367,371,372,376,377,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:25.596,ns_1@10.242.238.90:<0.27992.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.27984.0> [ns_server:info,2014-08-19T16:51:25.597,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:25.612,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream 
`replication_ns_1@10.242.238.90`: [{363,1}, {366,1}, {367,1}, {371,1}, {372,1}, {376,1}, {377,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:25.613,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:25.613,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:25.614,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:25.614,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:25.614,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:25.614,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.614,ns_1@10.242.238.90:<0.27994.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.614,ns_1@10.242.238.90:<0.27994.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.614,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:25.615,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:25.615,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:25.615,ns_1@10.242.238.90:<0.27984.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:25.615,ns_1@10.242.238.90:<0.27992.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.27984.0> [ns_server:debug,2014-08-19T16:51:25.615,ns_1@10.242.238.90:<0.27992.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:25.615,ns_1@10.242.238.90:<0.27996.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:25.615,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.27984.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.27985.0>,<<"cut off">>,<<"cut off">>,[],163,false,false,0, {1408,452685,613964}, completed, {<0.27992.0>,#Ref<0.0.1.56104>}, <<"replication_ns_1@10.242.238.90">>,<0.27984.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:25.616,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.27992.0>,{#Ref<0.0.1.56091>,<0.27996.0>}} [error_logger:info,2014-08-19T16:51:25.616,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.27996.0>}, {name, {new_child_id, [363,366,367,371,372,376,377,380,381,382,383, 384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [363,366,367,371,372,376,377,380,381,382, 383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:25.621,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.624,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.624,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3291 us [ns_server:debug,2014-08-19T16:51:25.625,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.625,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{377, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.628,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[363,366,367,371,372,376,377,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:25.628,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.27998.0> [ns_server:debug,2014-08-19T16:51:25.652,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.655,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
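Every PROGRESS REPORT from 'ns_vbm_new_sup-default' in this section starts a temporary child whose id is {new_child_id, VBuckets, SourceNode}, so a changed filter is by construction a different child. A child spec in that style, using the start_link arguments visible in the reports (the option list is trimmed to fields the log actually shows):

    -module(child_spec_sketch).
    -export([child_spec/4]).

    %% Old-style supervisor child spec mirroring the progress reports:
    %% the id embeds the source node and the full vbucket list, the child
    %% is temporary, and shutdown is 60000 ms, as logged above.
    child_spec(SrcNode, Src, Dst, VBuckets) ->
        {{new_child_id, VBuckets, SrcNode},
         {ebucketmigrator_srv, start_link,
          [Src, Dst, [{vbuckets, VBuckets},
                      {takeover, false},
                      {username, "default"}]]},
         temporary, 60000, worker, [ebucketmigrator_srv]}.

    %% child_spec('ns_1@10.242.238.89',
    %%            {"10.242.238.89", 11209}, {"10.242.238.90", 11209},
    %%            [363,366,367,371,372,374,376,377]).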
[ns_server:debug,2014-08-19T16:51:25.655,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3059 us [ns_server:debug,2014-08-19T16:51:25.656,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.656,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{886, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:25.658,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 374 state to replica [ns_server:info,2014-08-19T16:51:25.659,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [363,366,367,371,372,374,376,377,380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([374], []) [ns_server:debug,2014-08-19T16:51:25.659,ns_1@10.242.238.90:<0.27999.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [363,366,367,371,372,374,376,377,380,381,382, 383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.56255>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[363,366,367,371,372,374,376,377,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:25.660,ns_1@10.242.238.90:<0.27999.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.27996.0> [ns_server:info,2014-08-19T16:51:25.660,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:25.676,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{363,1}, {366,1}, {367,1}, {371,1}, {372,1}, {374,1}, {376,1}, {377,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] 
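The filter itself is handed to the TAP stream as a list of {VBucket, N} pairs; in every entry here N is 1, which the sketch below takes to be a per-vbucket checkpoint to resume from — an assumption, since the log never names the second field.

    -module(filter_pairs_sketch).
    -export([pairs/1, pairs/2]).

    %% Pair each vbucket with the value observed in the log (1 throughout).
    pairs(VBuckets) ->
        pairs(VBuckets, 1).

    pairs(VBuckets, Checkpoint) ->
        [{VB, Checkpoint} || VB <- VBuckets].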
[ns_server:info,2014-08-19T16:51:25.677,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:25.678,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:25.678,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:25.678,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:25.678,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:25.678,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.678,ns_1@10.242.238.90:<0.28002.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.678,ns_1@10.242.238.90:<0.28002.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.678,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:25.679,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:25.679,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:25.679,ns_1@10.242.238.90:<0.27996.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:25.679,ns_1@10.242.238.90:<0.27999.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.27996.0> [ns_server:debug,2014-08-19T16:51:25.679,ns_1@10.242.238.90:<0.27999.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:25.679,ns_1@10.242.238.90:<0.28004.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:25.679,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.27996.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.27998.0>,<<"cut off">>,<<"cut off">>,[],166,false,false,0, {1408,452685,678167}, completed, {<0.27999.0>,#Ref<0.0.1.56270>}, <<"replication_ns_1@10.242.238.90">>,<0.27996.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:25.680,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.27999.0>,{#Ref<0.0.1.56257>,<0.28004.0>}} [error_logger:info,2014-08-19T16:51:25.680,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.28004.0>}, {name, {new_child_id, [363,366,367,371,372,374,376,377,380,381,382, 383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [363,366,367,371,372,374,376,377,380,381, 382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:25.687,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.688,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1398 us [ns_server:debug,2014-08-19T16:51:25.688,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.689,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.690,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{374, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.694,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[363,366,367,371,372,374,376,377,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:25.694,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28005.0> [ns_server:debug,2014-08-19T16:51:25.718,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.720,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
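The "Got full synchronization request" / "Fully synchronized config in N us" pairs bracket each pull from 'ns_1@10.242.238.88' with its duration in microseconds. timer:tc/1 yields the same measurement for any synchronisation fun; SyncFun below is a placeholder, not an ns_config_rep API.

    -module(sync_timing_sketch).
    -export([timed_sync/1]).

    %% Run SyncFun and log its duration in microseconds, in the same
    %% style as the ns_config_rep entries above.
    timed_sync(SyncFun) when is_function(SyncFun, 0) ->
        {Micros, Result} = timer:tc(SyncFun),
        error_logger:info_msg("Fully synchronized config in ~p us~n", [Micros]),
        Result.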
[ns_server:debug,2014-08-19T16:51:25.721,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2738 us [ns_server:debug,2014-08-19T16:51:25.721,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.722,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{887, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:25.723,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 370 state to replica [ns_server:info,2014-08-19T16:51:25.723,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [363,366,367,370,371,372,374,376,377,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([370], []) [ns_server:debug,2014-08-19T16:51:25.724,ns_1@10.242.238.90:<0.28007.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [363,366,367,370,371,372,374,376,377,380,381, 382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425, 426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.56426>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[363,366,367,370,371,372,374,376,377,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:25.725,ns_1@10.242.238.90:<0.28007.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.28004.0> [ns_server:info,2014-08-19T16:51:25.725,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:25.742,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{363,1}, {366,1}, {367,1}, {370,1}, {371,1}, {372,1}, {374,1}, {376,1}, {377,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] 
[ns_server:info,2014-08-19T16:51:25.743,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:25.743,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:25.743,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:25.743,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:25.743,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:25.743,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.743,ns_1@10.242.238.90:<0.28010.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.744,ns_1@10.242.238.90:<0.28010.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.744,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:25.744,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:25.744,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:25.744,ns_1@10.242.238.90:<0.28004.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:25.744,ns_1@10.242.238.90:<0.28007.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.28004.0> [ns_server:debug,2014-08-19T16:51:25.744,ns_1@10.242.238.90:<0.28007.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:25.744,ns_1@10.242.238.90:<0.28012.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:25.745,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.28004.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.28005.0>,<<"cut off">>,<<"cut off">>,[],169,false,false,0, {1408,452685,743350}, completed, {<0.28007.0>,#Ref<0.0.1.56439>}, <<"replication_ns_1@10.242.238.90">>,<0.28004.0>, {had_backfill,false,undefined,[]}, completed,false}. 
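Editor's note: the tap_replication_manager entries of the form `Going to change replication from 'ns_1@10.242.238.89' to have [...] ([370], [])` appear to report the new vbucket list together with the vbuckets added and removed relative to the previous filter. A sketch of that diff under this reading, using the stdlib ordsets module; vb_delta is an illustrative name, not an ns_server module:

    %% Sketch: compute the (Added, Removed) delta between the old and new
    %% replication filters, e.g. diff(Old, Old ++ [370]) -> {[370], []}.
    -module(vb_delta).
    -export([diff/2]).

    diff(OldVBuckets, NewVBuckets) ->
        Old = ordsets:from_list(OldVBuckets),
        New = ordsets:from_list(NewVBuckets),
        {ordsets:subtract(New, Old),   %% added
         ordsets:subtract(Old, New)}.  %% removed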
[ns_server:debug,2014-08-19T16:51:25.745,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.28007.0>,{#Ref<0.0.1.56428>,<0.28012.0>}} [error_logger:info,2014-08-19T16:51:25.745,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.28012.0>}, {name, {new_child_id, [363,366,367,370,371,372,374,376,377,380,381, 382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425, 426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [363,366,367,370,371,372,374,376,377,380, 381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:25.750,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.759,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8884 us [ns_server:debug,2014-08-19T16:51:25.759,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[363,366,367,370,371,372,374,376,377,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:debug,2014-08-19T16:51:25.759,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:25.759,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28013.0> [ns_server:debug,2014-08-19T16:51:25.760,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.760,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{370, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.784,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.788,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
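Editor's note: each `config change: buckets ->` entry dumps the per-bucket settings as nested proplists, [{configs, [{BucketName, Props}]}]. Under that assumption, individual fields can be read back with the stdlib proplists module; the accessors below are illustrative only, not part of ns_server:

    %% Illustrative accessors over the bucket config proplist dumped above.
    -module(bucket_cfg).
    -export([servers/2, num_replicas/2]).

    props(BucketName, BucketsCfg) ->
        Configs = proplists:get_value(configs, BucketsCfg, []),
        proplists:get_value(BucketName, Configs, []).

    servers(BucketName, BucketsCfg) ->
        proplists:get_value(servers, props(BucketName, BucketsCfg), []).

    num_replicas(BucketName, BucketsCfg) ->
        proplists:get_value(num_replicas, props(BucketName, BucketsCfg)).

With the entry above, bucket_cfg:servers("default", Cfg) would return the four 'ns_1@10.242.238.*' nodes and bucket_cfg:num_replicas("default", Cfg) would return 1.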
[ns_server:debug,2014-08-19T16:51:25.788,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3693 us [ns_server:debug,2014-08-19T16:51:25.788,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.789,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{629, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.815,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.818,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.818,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2782 us [ns_server:debug,2014-08-19T16:51:25.819,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.819,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{875, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.847,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.850,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.851,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2987 us [ns_server:debug,2014-08-19T16:51:25.851,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.852,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{881, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, 
{map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.877,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.878,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1212 us [ns_server:debug,2014-08-19T16:51:25.878,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.879,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.880,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{621, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.908,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.910,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.910,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2634 us [ns_server:debug,2014-08-19T16:51:25.911,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.912,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{883, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.942,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.949,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6807 us [ns_server:debug,2014-08-19T16:51:25.949,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.949,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.950,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{882, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:25.951,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 379 state to replica [ns_server:info,2014-08-19T16:51:25.951,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [363,366,367,370,371,372,374,376,377,379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([379], []) [ns_server:debug,2014-08-19T16:51:25.952,ns_1@10.242.238.90:<0.28021.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [363,366,367,370,371,372,374,376,377,379,380, 381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424, 425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.56720>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[363,366,367,370,371,372,374,376,377,379,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:25.953,ns_1@10.242.238.90:<0.28021.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.28012.0> [ns_server:info,2014-08-19T16:51:25.953,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:25.976,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{363,1}, {366,1}, {367,1}, {370,1}, {371,1}, {372,1}, {374,1}, {376,1}, {377,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:25.977,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:25.977,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:51:25.977,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:25.978,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:25.978,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:25.978,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:25.978,ns_1@10.242.238.90:<0.28023.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:25.978,ns_1@10.242.238.90:<0.28023.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:25.978,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:25.978,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:25.978,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:25.978,ns_1@10.242.238.90:<0.28012.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:25.978,ns_1@10.242.238.90:<0.28021.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.28012.0> [ns_server:debug,2014-08-19T16:51:25.979,ns_1@10.242.238.90:<0.28021.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:25.979,ns_1@10.242.238.90:<0.28025.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:25.979,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.28012.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.28013.0>,<<"cut off">>,<<"cut off">>,[],172,false,false,0, {1408,452685,977804}, completed, {<0.28021.0>,#Ref<0.0.1.56733>}, <<"replication_ns_1@10.242.238.90">>,<0.28012.0>, {had_backfill,false,undefined,[]}, completed,false}. 
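Editor's note: the dumped migrator state above includes the triple {1408,452685,977804}, which looks like an Erlang {MegaSecs, Secs, MicroSecs} timestamp. If that assumption holds, the standard calendar module converts it to a wall-clock time:

    1> calendar:now_to_universal_time({1408,452685,977804}).
    {{2014,8,19},{12,51,25}}

That would be consistent with the 16:51:25 local timestamps in the surrounding entries if the node's clock runs at UTC+4.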
[ns_server:debug,2014-08-19T16:51:25.979,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.28021.0>,{#Ref<0.0.1.56722>,<0.28025.0>}} [error_logger:info,2014-08-19T16:51:25.979,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.28025.0>}, {name, {new_child_id, [363,366,367,370,371,372,374,376,377,379,380, 381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424, 425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [363,366,367,370,371,372,374,376,377,379, 380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:25.984,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:25.987,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.987,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3238 us [ns_server:debug,2014-08-19T16:51:25.987,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.988,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{379, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:25.989,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 364 state to replica [ns_server:info,2014-08-19T16:51:25.990,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [363,364,366,367,370,371,372,374,376,377,379,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426] ([364], []) [ns_server:debug,2014-08-19T16:51:25.991,ns_1@10.242.238.90:<0.28027.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [363,364,366,367,370,371,372,374,376,377,379, 380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401, 
402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423, 424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.56855>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[363,364,366,367,370,371,372,374,376,377,379,380,381,382,383, 384,385,386,387,388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:25.991,ns_1@10.242.238.90:<0.28027.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.28025.0> [ns_server:debug,2014-08-19T16:51:25.991,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[363,366,367,370,371,372,374,376,377,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:25.992,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28029.0> [ns_server:info,2014-08-19T16:51:25.992,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:26.004,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{363,1}, {364,1}, {366,1}, {367,1}, {370,1}, {371,1}, {372,1}, {374,1}, {376,1}, {377,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:26.005,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:26.005,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:26.005,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:51:26.005,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:26.005,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:26.006,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:26.006,ns_1@10.242.238.90:<0.28030.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:26.006,ns_1@10.242.238.90:<0.28030.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:26.006,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:26.006,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:26.006,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:26.006,ns_1@10.242.238.90:<0.28025.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:26.007,ns_1@10.242.238.90:<0.28027.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.28025.0> [ns_server:debug,2014-08-19T16:51:26.007,ns_1@10.242.238.90:<0.28027.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:26.007,ns_1@10.242.238.90:<0.28032.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:26.007,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.28025.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.28029.0>,<<"cut off">>,<<"cut off">>,[],175,false,false,0, {1408,452686,5579}, completed, {<0.28027.0>,#Ref<0.0.1.56868>}, <<"replication_ns_1@10.242.238.90">>,<0.28025.0>, {had_backfill,false,undefined,[]}, completed,false}. 
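Editor's note: the cycle that keeps repeating above (the old ebucketmigrator silences its upstream sender, confirms the downstream with an opaque message, hands its state to the caller and dies, and the replacement "reuses the old upstream") is a state handoff between processes. A bare-bones sketch of that pattern in plain Erlang; this is not the ns_server implementation and all names are made up:

    %% Bare-bones handoff sketch: the old worker surrenders its state on
    %% request and exits; the new worker starts with a retriever fun
    %% (compare old_state_retriever in the child specs above) and reuses
    %% that state instead of rebuilding its connections.
    -module(handoff_sketch).
    -export([demo/0]).

    old_worker(State) ->
        receive
            {get_state, From, Ref} ->
                From ! {Ref, State}     %% reply with the state, then terminate
        end.

    new_worker(StateRetriever) ->
        OldState = StateRetriever(),
        io:format("new worker reusing: ~p~n", [OldState]),
        OldState.

    demo() ->
        Old = spawn(fun() -> old_worker([{vbuckets, [363, 366, 367]}]) end),
        Retriever = fun() ->
                            Ref = make_ref(),
                            Old ! {get_state, self(), Ref},
                            receive {Ref, S} -> S after 5000 -> error(timeout) end
                    end,
        new_worker(Retriever).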
[ns_server:debug,2014-08-19T16:51:26.007,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.28027.0>,{#Ref<0.0.1.56857>,<0.28032.0>}} [error_logger:info,2014-08-19T16:51:26.007,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.28032.0>}, {name, {new_child_id, [363,364,366,367,370,371,372,374,376,377,379, 380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423, 424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [363,364,366,367,370,371,372,374,376,377, 379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:26.012,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:26.015,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2704 us [ns_server:debug,2014-08-19T16:51:26.015,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.015,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.016,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{364, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:26.020,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[363,364,366,367,370,371,372,374,376,377,379,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:26.020,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28034.0> [ns_server:debug,2014-08-19T16:51:26.045,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:26.047,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
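Editor's note: each map entry in the config changes has the shape {VBucket, OldChain, NewChain}, and the changes are accompanied by `Changed vbucket N state to replica` entries on this node. Assuming the usual convention that the first node in a chain is the active copy and the rest are replicas, a hypothetical check for whether a node gains a replica would look like:

    %% Assumption: chain = [ActiveNode | ReplicaNodes]. Helper name is
    %% made up for illustration.
    -module(chain_sketch).
    -export([becomes_replica/2]).

    becomes_replica(Node, {_VBucket, _OldChain, [_Active | Replicas]}) ->
        lists:member(Node, Replicas).

For the {364, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']} entry above, chain_sketch:becomes_replica('ns_1@10.242.238.90', Entry) returns true, which lines up with the earlier `Changed vbucket 364 state to replica` entry.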
[ns_server:debug,2014-08-19T16:51:26.047,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1642 us [ns_server:debug,2014-08-19T16:51:26.048,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.048,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{627, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:26.050,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 369 state to replica [ns_server:info,2014-08-19T16:51:26.050,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [363,364,366,367,369,370,371,372,374,376,377,379,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424, 425,426] ([369], []) [ns_server:debug,2014-08-19T16:51:26.051,ns_1@10.242.238.90:<0.28036.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [363,364,366,367,369,370,371,372,374,376,377, 379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.57030>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[363,364,366,367,369,370,371,372,374,376,377,379,380,381,382, 383,384,385,386,387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:26.052,ns_1@10.242.238.90:<0.28036.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.28032.0> [ns_server:info,2014-08-19T16:51:26.052,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:26.068,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{363,1}, {364,1}, {366,1}, {367,1}, {369,1}, {370,1}, {371,1}, {372,1}, {374,1}, {376,1}, {377,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, 
{421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:26.069,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:26.069,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:26.069,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:26.069,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:26.069,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:26.069,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:26.069,ns_1@10.242.238.90:<0.28038.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:26.070,ns_1@10.242.238.90:<0.28038.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:26.070,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:26.070,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:26.070,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:26.070,ns_1@10.242.238.90:<0.28032.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:26.070,ns_1@10.242.238.90:<0.28036.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.28032.0> [ns_server:debug,2014-08-19T16:51:26.071,ns_1@10.242.238.90:<0.28036.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:26.071,ns_1@10.242.238.90:<0.28040.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:26.071,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.28032.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.28034.0>,<<"cut off">>,<<"cut off">>,[],178,false,false,0, {1408,452686,69443}, completed, {<0.28036.0>,#Ref<0.0.1.57043>}, <<"replication_ns_1@10.242.238.90">>,<0.28032.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:26.071,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.28036.0>,{#Ref<0.0.1.57032>,<0.28040.0>}} [error_logger:info,2014-08-19T16:51:26.071,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.28040.0>}, {name, {new_child_id, [363,364,366,367,369,370,371,372,374,376,377, 379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [363,364,366,367,369,370,371,372,374,376, 377,379,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:26.075,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:26.079,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3334 us [ns_server:debug,2014-08-19T16:51:26.079,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.079,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.080,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{369, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:26.085,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 378 state to replica [ns_server:info,2014-08-19T16:51:26.085,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [363,364,366,367,369,370,371,372,374,376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423, 424,425,426] ([378], []) [ns_server:debug,2014-08-19T16:51:26.085,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[363,364,366,367,369,370,371,372,374,376,377,379,380,381,382,383, 384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399, 
400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:26.086,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28042.0> [ns_server:debug,2014-08-19T16:51:26.088,ns_1@10.242.238.90:<0.28043.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [363,364,366,367,369,370,371,372,374,376,377, 378,379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.57182>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[363,364,366,367,369,370,371,372,374,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:26.088,ns_1@10.242.238.90:<0.28043.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.28040.0> [ns_server:info,2014-08-19T16:51:26.089,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:26.100,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{363,1}, {364,1}, {366,1}, {367,1}, {369,1}, {370,1}, {371,1}, {372,1}, {374,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:26.102,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:26.103,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:26.103,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:51:26.103,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:26.103,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:26.103,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:26.103,ns_1@10.242.238.90:<0.28045.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:26.103,ns_1@10.242.238.90:<0.28045.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:26.103,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:26.103,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:26.104,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:26.104,ns_1@10.242.238.90:<0.28040.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:26.104,ns_1@10.242.238.90:<0.28043.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.28040.0> [ns_server:debug,2014-08-19T16:51:26.106,ns_1@10.242.238.90:<0.28043.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:26.106,ns_1@10.242.238.90:<0.28047.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:26.107,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.28040.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.28042.0>,<<"cut off">>,<<"cut off">>,[],181,false,false,0, {1408,452686,103092}, completed, {<0.28043.0>,#Ref<0.0.1.57195>}, <<"replication_ns_1@10.242.238.90">>,<0.28040.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:26.107,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.28043.0>,{#Ref<0.0.1.57184>,<0.28047.0>}} [error_logger:info,2014-08-19T16:51:26.107,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.28047.0>}, {name, {new_child_id, [363,364,366,367,369,370,371,372,374,376,377, 378,379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [363,364,366,367,369,370,371,372,374,376, 377,378,379,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:26.115,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.115,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:26.116,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 25 us [ns_server:debug,2014-08-19T16:51:26.116,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.116,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{378, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:26.118,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 368 state to replica [ns_server:info,2014-08-19T16:51:26.118,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [363,364,366,367,368,369,370,371,372,374,376,377,378,379,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426] ([368], []) [ns_server:debug,2014-08-19T16:51:26.119,ns_1@10.242.238.90:<0.28049.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [363,364,366,367,368,369,370,371,372,374,376, 377,378,379,380,381,382,383,384,385,386,387, 
388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.57315>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[363,364,366,367,368,369,370,371,372,374,376,377,378,379,380, 381,382,383,384,385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:26.120,ns_1@10.242.238.90:<0.28049.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.28047.0> [ns_server:debug,2014-08-19T16:51:26.122,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[363,364,366,367,369,370,371,372,374,376,377,378,379,380,381,382, 383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:26.122,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28051.0> [ns_server:info,2014-08-19T16:51:26.122,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:26.133,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{363,1}, {364,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {374,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:26.134,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:26.134,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:26.134,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:51:26.134,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:26.134,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:26.134,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:26.134,ns_1@10.242.238.90:<0.28052.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:26.135,ns_1@10.242.238.90:<0.28052.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:26.135,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:26.135,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:26.135,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:26.135,ns_1@10.242.238.90:<0.28047.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:26.135,ns_1@10.242.238.90:<0.28049.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.28047.0> [ns_server:debug,2014-08-19T16:51:26.136,ns_1@10.242.238.90:<0.28049.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:26.136,ns_1@10.242.238.90:<0.28054.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:26.136,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.28047.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.28051.0>,<<"cut off">>,<<"cut off">>,[],184,false,false,0, {1408,452686,134474}, completed, {<0.28049.0>,#Ref<0.0.1.57328>}, <<"replication_ns_1@10.242.238.90">>,<0.28047.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:26.136,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.28049.0>,{#Ref<0.0.1.57317>,<0.28054.0>}} [error_logger:info,2014-08-19T16:51:26.136,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.28054.0>}, {name, {new_child_id, [363,364,366,367,368,369,370,371,372,374,376, 377,378,379,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [363,364,366,367,368,369,370,371,372,374, 376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:26.140,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:26.147,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.148,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.148,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{368, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:26.147,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7028 us [ns_server:debug,2014-08-19T16:51:26.151,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[363,364,366,367,368,369,370,371,372,374,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:26.151,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28056.0> [ns_server:debug,2014-08-19T16:51:26.174,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
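Editor's note: every entry in this log starts with a header of the form [component:severity,timestamp,node:process:module:function:line] followed by the message text. A small parser for that header; the field names are the editor's, and the format is inferred from the lines in this file:

    %% Inferred header format: "[component:severity,timestamp,origin]message".
    %% Messages that wrap onto further lines are not handled here.
    -module(log_header).
    -export([parse/1]).

    parse(Line) ->
        RE = "^\\[([^:]+):([^,]+),([^,]+),([^\\]]+)\\](.*)$",
        case re:run(Line, RE, [{capture, all_but_first, list}]) of
            {match, [Component, Severity, Timestamp, Origin, Message]} ->
                {ok, [{component, Component}, {severity, Severity},
                      {timestamp, Timestamp}, {origin, Origin},
                      {message, Message}]};
            nomatch ->
                {error, not_a_header}
        end.

Applied to the `[rebalance:info,...]Got close ack!` line below, it yields component "rebalance", severity "info", the timestamp, the origin string, and "Got close ack!" as the message.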
[ns_server:debug,2014-08-19T16:51:26.177,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.177,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3701 us [ns_server:debug,2014-08-19T16:51:26.178,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.178,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{631, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:26.205,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:26.209,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3962 us [ns_server:debug,2014-08-19T16:51:26.209,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.210,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.210,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{619, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:26.224,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 373 state to replica [ns_server:info,2014-08-19T16:51:26.224,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [363,364,366,367,368,369,370,371,372,373,374,376,377,378,379,380,381,382,383, 384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426] ([373], []) [ns_server:debug,2014-08-19T16:51:26.225,ns_1@10.242.238.90:<0.28059.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [363,364,366,367,368,369,370,371,372,373,374, 376,377,378,379,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.57514>} 
Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[363,364,366,367,368,369,370,371,372,373,374,376,377,378,379, 380,381,382,383,384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420,421,422,423,424, 425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:26.226,ns_1@10.242.238.90:<0.28059.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.28054.0> [ns_server:info,2014-08-19T16:51:26.226,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:26.243,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{363,1}, {364,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:26.244,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:26.245,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:26.245,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:26.245,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:26.245,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:26.245,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:26.245,ns_1@10.242.238.90:<0.28061.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:26.246,ns_1@10.242.238.90:<0.28061.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:26.246,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:26.246,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:26.246,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:26.246,ns_1@10.242.238.90:<0.28054.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:26.246,ns_1@10.242.238.90:<0.28059.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.28054.0> [ns_server:debug,2014-08-19T16:51:26.247,ns_1@10.242.238.90:<0.28059.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:26.247,ns_1@10.242.238.90:<0.28063.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:26.247,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.28054.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.28056.0>,<<"cut off">>,<<"cut off">>,[],187,false,false,0, {1408,452686,245204}, completed, {<0.28059.0>,#Ref<0.0.1.57527>}, <<"replication_ns_1@10.242.238.90">>,<0.28054.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:26.247,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.28059.0>,{#Ref<0.0.1.57516>,<0.28063.0>}} [error_logger:info,2014-08-19T16:51:26.247,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.28063.0>}, {name, {new_child_id, [363,364,366,367,368,369,370,371,372,373,374, 376,377,378,379,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [363,364,366,367,368,369,370,371,372,373, 374,376,377,378,379,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424, 425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:26.252,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:26.255,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.256,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3868 us [ns_server:debug,2014-08-19T16:51:26.256,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:26.257,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{373, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:26.259,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 365 state to replica [ns_server:info,2014-08-19T16:51:26.259,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [363,364,365,366,367,368,369,370,371,372,373,374,376,377,378,379,380,381,382, 383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426] ([365], []) [ns_server:debug,2014-08-19T16:51:26.261,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[363,364,366,367,368,369,370,371,372,373,374,376,377,378,379,380, 381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:26.261,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28065.0> [ns_server:debug,2014-08-19T16:51:26.261,ns_1@10.242.238.90:<0.28066.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [363,364,365,366,367,368,369,370,371,372,373, 374,376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.57653>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[363,364,365,366,367,368,369,370,371,372,373,374,376,377,378, 379,380,381,382,383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:26.262,ns_1@10.242.238.90:<0.28066.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.28063.0> [ns_server:info,2014-08-19T16:51:26.262,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:26.273,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, 
{382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:26.274,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:26.274,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:26.274,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:26.274,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:26.274,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:26.275,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:26.275,ns_1@10.242.238.90:<0.28068.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:26.275,ns_1@10.242.238.90:<0.28068.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:26.275,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:26.275,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:26.275,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:26.275,ns_1@10.242.238.90:<0.28063.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:26.275,ns_1@10.242.238.90:<0.28066.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.28063.0> [ns_server:debug,2014-08-19T16:51:26.276,ns_1@10.242.238.90:<0.28066.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:26.276,ns_1@10.242.238.90:<0.28070.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:26.276,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.28063.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.28065.0>,<<"cut off">>,<<"cut off">>,[],190,false,false,0, {1408,452686,274583}, completed, {<0.28066.0>,#Ref<0.0.1.57669>}, <<"replication_ns_1@10.242.238.90">>,<0.28063.0>, {had_backfill,false,undefined,[]}, completed,false}. 
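Each tap_replication_manager "Going to change replication ... to have [...]" entry above prints the full new vbucket list plus the delta against the previous filter, e.g. "([373], [])" when only vbucket 373 is added. A small illustrative Python sketch (an assumed helper, not ns_server code) that reproduces that delta from two successive lists:

def vbucket_delta(old_vbuckets, new_vbuckets):
    # (added, removed) relative to the previous replication filter.
    old, new = set(old_vbuckets), set(new_vbuckets)
    return sorted(new - old), sorted(old - new)

# Lists taken from the filter change above that added vbucket 373
# (365 and 375 are still missing from the filter at that point).
previous = [363, 364] + list(range(366, 373)) + [374] + list(range(376, 427))
current  = [363, 364] + list(range(366, 375)) + list(range(376, 427))
print(vbucket_delta(previous, current))   # ([373], []) -- matches the logged delta
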
[ns_server:debug,2014-08-19T16:51:26.276,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.28066.0>,{#Ref<0.0.1.57655>,<0.28070.0>}} [error_logger:info,2014-08-19T16:51:26.276,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.28070.0>}, {name, {new_child_id, [363,364,365,366,367,368,369,370,371,372,373, 374,376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [363,364,365,366,367,368,369,370,371,372, 373,374,376,377,378,379,380,381,382,383, 384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:26.281,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:26.285,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3991 us [ns_server:debug,2014-08-19T16:51:26.285,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.286,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.286,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{365, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:26.289,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[363,364,365,366,367,368,369,370,371,372,373,374,376,377,378,379, 380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:26.289,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28072.0> [ns_server:debug,2014-08-19T16:51:26.317,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:51:26.320,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.320,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2764 us [ns_server:debug,2014-08-19T16:51:26.321,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{880, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:26.321,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.345,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:26.352,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6695 us [ns_server:debug,2014-08-19T16:51:26.352,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.353,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.353,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{625, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:26.355,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 375 state to replica [ns_server:info,2014-08-19T16:51:26.355,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426] ([375], []) [ns_server:debug,2014-08-19T16:51:26.358,ns_1@10.242.238.90:<0.28075.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.57846>} 
Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[363,364,365,366,367,368,369,370,371,372,373,374,375,376,377, 378,379,380,381,382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:26.358,ns_1@10.242.238.90:<0.28075.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.28070.0> [ns_server:info,2014-08-19T16:51:26.358,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:26.375,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:26.376,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:26.379,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:26.379,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:26.379,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:26.380,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:26.380,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:26.380,ns_1@10.242.238.90:<0.28077.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:26.380,ns_1@10.242.238.90:<0.28077.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:26.380,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:26.380,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:26.380,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:26.380,ns_1@10.242.238.90:<0.28070.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:26.380,ns_1@10.242.238.90:<0.28075.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.28070.0> [ns_server:debug,2014-08-19T16:51:26.381,ns_1@10.242.238.90:<0.28075.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:26.381,ns_1@10.242.238.90:<0.28079.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:26.381,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.28070.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.28072.0>,<<"cut off">>,<<"cut off">>,[],193,false,false,0, {1408,452686,379565}, completed, {<0.28075.0>,#Ref<0.0.1.57859>}, <<"replication_ns_1@10.242.238.90">>,<0.28070.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:26.381,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.28075.0>,{#Ref<0.0.1.57848>,<0.28079.0>}} [error_logger:info,2014-08-19T16:51:26.381,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.28079.0>}, {name, {new_child_id, [363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [363,364,365,366,367,368,369,370,371,372, 373,374,375,376,377,378,379,380,381,382, 383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:26.386,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:26.390,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:26.390,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3945 us [ns_server:debug,2014-08-19T16:51:26.391,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
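From here on the log interleaves ns_memcached "Changed vbucket N state to replica" events (373, 365 and 375 above; 618, 362, 617, 616, ... below) with the per-vbucket replication_building tap streams. A quick illustrative Python sketch (assuming the whole log is available as plain text) for tallying those state-change events while reading a dump like this:

import re
from collections import Counter

STATE_CHANGE = re.compile(r"Changed vbucket (\d+) state to (\w+)")

def count_state_changes(log_text):
    # Tally target states over every "Changed vbucket N state to <state>" message.
    return Counter(state for _vb, state in STATE_CHANGE.findall(log_text))

sample = ("Changed vbucket 373 state to replica\n"
          "Changed vbucket 365 state to replica\n"
          "Changed vbucket 375 state to replica\n")
print(count_state_changes(sample))   # Counter({'replica': 3})
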
[ns_server:debug,2014-08-19T16:51:26.392,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{375, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:26.395,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378, 379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:26.395,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28080.0> [ns_server:info,2014-08-19T16:51:26.469,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 618 state to replica [ns_server:info,2014-08-19T16:51:26.475,ns_1@10.242.238.90:<0.28082.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 618 to state replica [ns_server:debug,2014-08-19T16:51:26.509,ns_1@10.242.238.90:<0.28082.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_618_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:26.510,ns_1@10.242.238.90:<0.28082.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[618]}, {checkpoints,[{618,0}]}, {name,<<"replication_building_618_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[618]}, {takeover,false}, {suffix,"building_618_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",618,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:26.511,ns_1@10.242.238.90:<0.28082.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28083.0> [rebalance:debug,2014-08-19T16:51:26.511,ns_1@10.242.238.90:<0.28082.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:26.511,ns_1@10.242.238.90:<0.28082.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.22775.1>,#Ref<16550.0.2.12383>}]} [rebalance:info,2014-08-19T16:51:26.512,ns_1@10.242.238.90:<0.28082.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 618 [rebalance:debug,2014-08-19T16:51:26.512,ns_1@10.242.238.90:<0.28082.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.22775.1>,#Ref<16550.0.2.12383>}] [ns_server:debug,2014-08-19T16:51:26.513,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28084.0> (ok) [ns_server:debug,2014-08-19T16:51:26.513,ns_1@10.242.238.90:<0.28082.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:26.514,ns_1@10.242.238.90:<0.28085.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 
618 [ns_server:info,2014-08-19T16:51:26.518,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 362 state to replica [ns_server:info,2014-08-19T16:51:26.522,ns_1@10.242.238.90:<0.28088.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 362 to state replica [ns_server:debug,2014-08-19T16:51:26.544,ns_1@10.242.238.90:<0.28088.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_362_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:26.546,ns_1@10.242.238.90:<0.28088.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[362]}, {checkpoints,[{362,0}]}, {name,<<"replication_building_362_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[362]}, {takeover,false}, {suffix,"building_362_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",362,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:26.546,ns_1@10.242.238.90:<0.28088.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28103.0> [rebalance:debug,2014-08-19T16:51:26.546,ns_1@10.242.238.90:<0.28088.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:26.547,ns_1@10.242.238.90:<0.28088.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.22798.1>,#Ref<16550.0.2.12486>}]} [rebalance:info,2014-08-19T16:51:26.547,ns_1@10.242.238.90:<0.28088.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 362 [rebalance:debug,2014-08-19T16:51:26.547,ns_1@10.242.238.90:<0.28088.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.22798.1>,#Ref<16550.0.2.12486>}] [ns_server:debug,2014-08-19T16:51:26.548,ns_1@10.242.238.90:<0.28088.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:26.560,ns_1@10.242.238.90:<0.28104.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 362 [ns_server:debug,2014-08-19T16:51:26.614,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 618. Nacking mccouch update. [views:debug,2014-08-19T16:51:26.614,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/618. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:26.614,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",618,replica,0} [ns_server:info,2014-08-19T16:51:26.615,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 617 state to replica [ns_server:debug,2014-08-19T16:51:26.615,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,971,724,660,413,958,711,647,400,1022,945, 762,698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412,957, 710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671, 424,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759,695, 631,384,1006,993,746,682,618,371,980,733,669,422,967,720,656,409,954,707,643, 396,1018,941,758,694,630,383,1005,992,745,681,370,979,732,668,421,966,719, 655,408,985,953,738,706,674,642,395,363,1017,972,940,757,725,693,661,629,414, 382,1004,991,959,744,712,680,648,401,369,1023,978,946,763,731,699,667,635, 420,388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003, 990,743,679,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755, 691,627,380,1002,989,742,678,367,976,729,665,418,963,716,652,405,950,767,703, 639,392,1014,754,690,626,379,1001,988,741,677,366,975,728,664,417,962,715, 651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,365,974, 727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986, 739,675,364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998,751, 687,623,376] [ns_server:info,2014-08-19T16:51:26.622,ns_1@10.242.238.90:<0.28107.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 617 to state replica [ns_server:debug,2014-08-19T16:51:26.649,ns_1@10.242.238.90:<0.28107.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_617_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:26.650,ns_1@10.242.238.90:<0.28107.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[617]}, {checkpoints,[{617,0}]}, {name,<<"replication_building_617_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[617]}, {takeover,false}, {suffix,"building_617_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",617,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:26.651,ns_1@10.242.238.90:<0.28107.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28108.0> [rebalance:debug,2014-08-19T16:51:26.651,ns_1@10.242.238.90:<0.28107.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:26.652,ns_1@10.242.238.90:<0.28107.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.22855.1>,#Ref<16550.0.2.13090>}]} [rebalance:info,2014-08-19T16:51:26.652,ns_1@10.242.238.90:<0.28107.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 617 [rebalance:debug,2014-08-19T16:51:26.652,ns_1@10.242.238.90:<0.28107.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.22855.1>,#Ref<16550.0.2.13090>}] 
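The capi_set_view_manager "Usable vbuckets:" dumps in this stretch grow as _local/vbuuid documents are added (362 shows up in the next dump, 617 in the one after that). An illustrative Python sketch, not part of ns_server, that diffs consecutive dumps to list which vbuckets became usable between them:

import re

USABLE = re.compile(r"Usable vbuckets:\s*\[([0-9,\s]+)\]")

def usable_sets(log_text):
    # One set of vbucket ids per "Usable vbuckets: [...]" dump, in log order.
    return [set(int(x) for x in m.group(1).split(","))
            for m in USABLE.finditer(log_text)]

def newly_usable(log_text):
    dumps = usable_sets(log_text)
    return [sorted(curr - prev) for prev, curr in zip(dumps, dumps[1:])]

sample = ("Usable vbuckets: [997,750,686]\n"
          "Usable vbuckets: [997,750,686,362]\n")
print(newly_usable(sample))   # [[362]]
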
[ns_server:debug,2014-08-19T16:51:26.653,ns_1@10.242.238.90:<0.28107.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:26.653,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28109.0> (ok) [rebalance:debug,2014-08-19T16:51:26.654,ns_1@10.242.238.90:<0.28110.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 617 [ns_server:info,2014-08-19T16:51:26.658,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 361 state to replica [ns_server:info,2014-08-19T16:51:26.662,ns_1@10.242.238.90:<0.28113.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 361 to state replica [views:debug,2014-08-19T16:51:26.681,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/618. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:26.681,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",618,replica,0} [ns_server:debug,2014-08-19T16:51:26.684,ns_1@10.242.238.90:<0.28113.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_361_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:26.685,ns_1@10.242.238.90:<0.28113.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[361]}, {checkpoints,[{361,0}]}, {name,<<"replication_building_361_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[361]}, {takeover,false}, {suffix,"building_361_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",361,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:26.686,ns_1@10.242.238.90:<0.28113.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28114.0> [rebalance:debug,2014-08-19T16:51:26.686,ns_1@10.242.238.90:<0.28113.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:26.687,ns_1@10.242.238.90:<0.28113.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.22886.1>,#Ref<16550.0.2.13434>}]} [rebalance:info,2014-08-19T16:51:26.687,ns_1@10.242.238.90:<0.28113.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 361 [rebalance:debug,2014-08-19T16:51:26.687,ns_1@10.242.238.90:<0.28113.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.22886.1>,#Ref<16550.0.2.13434>}] [ns_server:debug,2014-08-19T16:51:26.688,ns_1@10.242.238.90:<0.28113.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:26.697,ns_1@10.242.238.90:<0.28115.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 361 [views:debug,2014-08-19T16:51:26.748,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/618. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:26.748,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",618,pending,0} [ns_server:info,2014-08-19T16:51:26.758,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 616 state to replica [ns_server:info,2014-08-19T16:51:26.765,ns_1@10.242.238.90:<0.28118.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 616 to state replica [ns_server:debug,2014-08-19T16:51:26.794,ns_1@10.242.238.90:<0.28118.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_616_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:26.795,ns_1@10.242.238.90:<0.28118.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[616]}, {checkpoints,[{616,0}]}, {name,<<"replication_building_616_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[616]}, {takeover,false}, {suffix,"building_616_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",616,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:26.796,ns_1@10.242.238.90:<0.28118.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28133.0> [rebalance:debug,2014-08-19T16:51:26.796,ns_1@10.242.238.90:<0.28118.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:26.796,ns_1@10.242.238.90:<0.28118.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.22950.1>,#Ref<16550.0.2.14217>}]} [rebalance:info,2014-08-19T16:51:26.797,ns_1@10.242.238.90:<0.28118.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 616 [rebalance:debug,2014-08-19T16:51:26.797,ns_1@10.242.238.90:<0.28118.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.22950.1>,#Ref<16550.0.2.14217>}] [ns_server:debug,2014-08-19T16:51:26.798,ns_1@10.242.238.90:<0.28118.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:26.798,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28134.0> (ok) [rebalance:debug,2014-08-19T16:51:26.799,ns_1@10.242.238.90:<0.28135.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 616 [ns_server:info,2014-08-19T16:51:26.804,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 360 state to replica [ns_server:info,2014-08-19T16:51:26.808,ns_1@10.242.238.90:<0.28138.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 360 to state replica [ns_server:debug,2014-08-19T16:51:26.828,ns_1@10.242.238.90:<0.28138.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_360_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:26.829,ns_1@10.242.238.90:<0.28138.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[360]}, {checkpoints,[{360,0}]}, {name,<<"replication_building_360_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[360]}, {takeover,false}, {suffix,"building_360_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",360,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, 
{set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:26.830,ns_1@10.242.238.90:<0.28138.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28139.0> [rebalance:debug,2014-08-19T16:51:26.830,ns_1@10.242.238.90:<0.28138.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:26.830,ns_1@10.242.238.90:<0.28138.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.22981.1>,#Ref<16550.0.2.14549>}]} [rebalance:info,2014-08-19T16:51:26.830,ns_1@10.242.238.90:<0.28138.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 360 [rebalance:debug,2014-08-19T16:51:26.831,ns_1@10.242.238.90:<0.28138.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.22981.1>,#Ref<16550.0.2.14549>}] [ns_server:debug,2014-08-19T16:51:26.831,ns_1@10.242.238.90:<0.28138.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:26.840,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 362. Nacking mccouch update. [views:debug,2014-08-19T16:51:26.840,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/362. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:26.840,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",362,replica,0} [ns_server:debug,2014-08-19T16:51:26.841,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,362,971,724,660,413,958,711,647,400,1022, 945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412, 957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735, 671,424,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994, 747,683,619,372,981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759, 695,631,384,1006,993,746,682,618,371,980,733,669,422,967,720,656,409,954,707, 643,396,1018,941,758,694,630,383,1005,992,745,681,370,979,732,668,421,966, 719,655,408,953,706,642,395,1017,972,940,757,725,693,661,629,414,382,1004, 991,959,744,712,680,648,401,369,1023,978,946,763,731,699,667,635,420,388, 1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003,990,743, 679,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755,691,627, 380,1002,989,742,678,367,976,729,665,418,963,716,652,405,950,767,703,639,392, 1014,754,690,626,379,1001,988,741,677,366,975,728,664,417,962,715,651,404, 949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,365,974,727,663, 416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739,675, 364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998,751,687,623, 376,985,738,674,363] [rebalance:debug,2014-08-19T16:51:26.844,ns_1@10.242.238.90:<0.28140.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 360 [views:debug,2014-08-19T16:51:26.892,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/362. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:26.892,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",362,replica,0} [ns_server:info,2014-08-19T16:51:26.910,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 615 state to replica [ns_server:info,2014-08-19T16:51:26.917,ns_1@10.242.238.90:<0.28143.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 615 to state replica [ns_server:debug,2014-08-19T16:51:26.945,ns_1@10.242.238.90:<0.28143.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_615_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:26.947,ns_1@10.242.238.90:<0.28143.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[615]}, {checkpoints,[{615,0}]}, {name,<<"replication_building_615_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[615]}, {takeover,false}, {suffix,"building_615_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",615,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:26.947,ns_1@10.242.238.90:<0.28143.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28144.0> [rebalance:debug,2014-08-19T16:51:26.947,ns_1@10.242.238.90:<0.28143.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:26.948,ns_1@10.242.238.90:<0.28143.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23029.1>,#Ref<16550.0.2.14908>}]} [rebalance:info,2014-08-19T16:51:26.948,ns_1@10.242.238.90:<0.28143.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 615 [rebalance:debug,2014-08-19T16:51:26.948,ns_1@10.242.238.90:<0.28143.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23029.1>,#Ref<16550.0.2.14908>}] [ns_server:debug,2014-08-19T16:51:26.949,ns_1@10.242.238.90:<0.28143.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:26.949,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28145.0> (ok) [rebalance:debug,2014-08-19T16:51:26.951,ns_1@10.242.238.90:<0.28146.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 615 [ns_server:info,2014-08-19T16:51:26.955,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 359 state to replica [ns_server:info,2014-08-19T16:51:26.959,ns_1@10.242.238.90:<0.28163.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 359 to state replica [ns_server:debug,2014-08-19T16:51:26.978,ns_1@10.242.238.90:<0.28163.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_359_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:26.979,ns_1@10.242.238.90:<0.28163.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[359]}, {checkpoints,[{359,0}]}, {name,<<"replication_building_359_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[359]}, {takeover,false}, {suffix,"building_359_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",359,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, 
{set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:26.980,ns_1@10.242.238.90:<0.28163.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28164.0> [rebalance:debug,2014-08-19T16:51:26.980,ns_1@10.242.238.90:<0.28163.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:26.981,ns_1@10.242.238.90:<0.28163.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23051.1>,#Ref<16550.0.2.15013>}]} [rebalance:info,2014-08-19T16:51:26.981,ns_1@10.242.238.90:<0.28163.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 359 [rebalance:debug,2014-08-19T16:51:26.981,ns_1@10.242.238.90:<0.28163.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23051.1>,#Ref<16550.0.2.15013>}] [ns_server:debug,2014-08-19T16:51:26.982,ns_1@10.242.238.90:<0.28163.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:26.994,ns_1@10.242.238.90:<0.28165.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 359 [ns_server:debug,2014-08-19T16:51:27.025,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 617. Nacking mccouch update. [views:debug,2014-08-19T16:51:27.026,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/617. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:27.026,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",617,pending,0} [ns_server:debug,2014-08-19T16:51:27.026,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,362,971,724,660,413,958,711,647,400,1022, 945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,970,723,659,412, 957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735, 671,424,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994, 747,683,619,372,981,734,670,423,968,721,657,410,955,708,644,397,1019,942,759, 695,631,384,1006,993,746,682,618,371,980,733,669,422,967,720,656,409,954,707, 643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,421, 966,719,655,408,953,706,642,395,1017,972,940,757,725,693,661,629,414,382, 1004,991,959,744,712,680,648,401,369,1023,978,946,763,731,699,667,635,420, 388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003,990, 743,679,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755,691, 627,380,1002,989,742,678,367,976,729,665,418,963,716,652,405,950,767,703,639, 392,1014,754,690,626,379,1001,988,741,677,366,975,728,664,417,962,715,651, 404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,365,974,727, 663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739, 675,364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998,751,687, 623,376,985,738,674,363] [ns_server:info,2014-08-19T16:51:27.044,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 614 state to replica [ns_server:info,2014-08-19T16:51:27.050,ns_1@10.242.238.90:<0.28168.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 614 to state replica [ns_server:debug,2014-08-19T16:51:27.079,ns_1@10.242.238.90:<0.28168.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: 
replication_building_614_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.080,ns_1@10.242.238.90:<0.28168.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[614]}, {checkpoints,[{614,0}]}, {name,<<"replication_building_614_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[614]}, {takeover,false}, {suffix,"building_614_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",614,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:27.081,ns_1@10.242.238.90:<0.28168.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28169.0> [rebalance:debug,2014-08-19T16:51:27.081,ns_1@10.242.238.90:<0.28168.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:27.082,ns_1@10.242.238.90:<0.28168.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23092.1>,#Ref<16550.0.2.15210>}]} [rebalance:info,2014-08-19T16:51:27.082,ns_1@10.242.238.90:<0.28168.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 614 [rebalance:debug,2014-08-19T16:51:27.082,ns_1@10.242.238.90:<0.28168.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23092.1>,#Ref<16550.0.2.15210>}] [ns_server:debug,2014-08-19T16:51:27.083,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28170.0> (ok) [ns_server:debug,2014-08-19T16:51:27.083,ns_1@10.242.238.90:<0.28168.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:27.084,ns_1@10.242.238.90:<0.28171.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 614 [ns_server:info,2014-08-19T16:51:27.088,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 358 state to replica [ns_server:info,2014-08-19T16:51:27.092,ns_1@10.242.238.90:<0.28174.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 358 to state replica [views:debug,2014-08-19T16:51:27.093,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/617. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:27.093,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",617,pending,0} [ns_server:debug,2014-08-19T16:51:27.112,ns_1@10.242.238.90:<0.28174.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_358_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.114,ns_1@10.242.238.90:<0.28174.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[358]}, {checkpoints,[{358,0}]}, {name,<<"replication_building_358_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[358]}, {takeover,false}, {suffix,"building_358_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",358,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:27.114,ns_1@10.242.238.90:<0.28174.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28175.0> [rebalance:debug,2014-08-19T16:51:27.114,ns_1@10.242.238.90:<0.28174.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:27.115,ns_1@10.242.238.90:<0.28174.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23119.1>,#Ref<16550.0.2.15333>}]} [rebalance:info,2014-08-19T16:51:27.115,ns_1@10.242.238.90:<0.28174.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 358 [rebalance:debug,2014-08-19T16:51:27.115,ns_1@10.242.238.90:<0.28174.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23119.1>,#Ref<16550.0.2.15333>}] [ns_server:debug,2014-08-19T16:51:27.116,ns_1@10.242.238.90:<0.28174.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:27.128,ns_1@10.242.238.90:<0.28176.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 358 [ns_server:info,2014-08-19T16:51:27.177,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 613 state to replica [ns_server:info,2014-08-19T16:51:27.183,ns_1@10.242.238.90:<0.28193.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 613 to state replica [ns_server:debug,2014-08-19T16:51:27.211,ns_1@10.242.238.90:<0.28193.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_613_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.213,ns_1@10.242.238.90:<0.28193.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[613]}, {checkpoints,[{613,0}]}, {name,<<"replication_building_613_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[613]}, {takeover,false}, {suffix,"building_613_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",613,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:27.213,ns_1@10.242.238.90:<0.28193.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28194.0> [rebalance:debug,2014-08-19T16:51:27.213,ns_1@10.242.238.90:<0.28193.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:27.214,ns_1@10.242.238.90:<0.28193.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter 
{had_backfill,undefined,undefined,[{<16550.23160.1>,#Ref<16550.0.2.15533>}]} [rebalance:info,2014-08-19T16:51:27.214,ns_1@10.242.238.90:<0.28193.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 613 [rebalance:debug,2014-08-19T16:51:27.214,ns_1@10.242.238.90:<0.28193.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23160.1>,#Ref<16550.0.2.15533>}] [ns_server:debug,2014-08-19T16:51:27.215,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28195.0> (ok) [ns_server:debug,2014-08-19T16:51:27.215,ns_1@10.242.238.90:<0.28193.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:27.216,ns_1@10.242.238.90:<0.28196.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 613 [ns_server:info,2014-08-19T16:51:27.220,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 357 state to replica [ns_server:info,2014-08-19T16:51:27.224,ns_1@10.242.238.90:<0.28199.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 357 to state replica [ns_server:debug,2014-08-19T16:51:27.246,ns_1@10.242.238.90:<0.28199.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_357_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.247,ns_1@10.242.238.90:<0.28199.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[357]}, {checkpoints,[{357,0}]}, {name,<<"replication_building_357_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[357]}, {takeover,false}, {suffix,"building_357_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",357,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:27.248,ns_1@10.242.238.90:<0.28199.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28200.0> [rebalance:debug,2014-08-19T16:51:27.248,ns_1@10.242.238.90:<0.28199.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:27.248,ns_1@10.242.238.90:<0.28199.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23182.1>,#Ref<16550.0.2.15638>}]} [rebalance:info,2014-08-19T16:51:27.249,ns_1@10.242.238.90:<0.28199.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 357 [rebalance:debug,2014-08-19T16:51:27.249,ns_1@10.242.238.90:<0.28199.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23182.1>,#Ref<16550.0.2.15638>}] [ns_server:debug,2014-08-19T16:51:27.250,ns_1@10.242.238.90:<0.28199.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:27.252,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 361. Nacking mccouch update. [views:debug,2014-08-19T16:51:27.253,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/361. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:27.253,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",361,replica,0} [ns_server:debug,2014-08-19T16:51:27.253,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,362,971,724,660,413,958,711,647,400,1022, 945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970,723,659, 412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982, 735,671,424,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007, 994,747,683,619,372,981,734,670,423,968,721,657,410,955,708,644,397,1019,942, 759,695,631,384,1006,993,746,682,618,371,980,733,669,422,967,720,656,409,954, 707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668, 421,966,719,655,408,953,706,642,395,1017,972,940,757,725,693,661,629,414,382, 1004,991,959,744,712,680,648,401,369,1023,978,946,763,731,699,667,635,420, 388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003,990, 743,679,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755,691, 627,380,1002,989,742,678,367,976,729,665,418,963,716,652,405,950,767,703,639, 392,1014,754,690,626,379,1001,988,741,677,366,975,728,664,417,962,715,651, 404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,365,974,727, 663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739, 675,364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998,751,687, 623,376,985,738,674,363] [rebalance:debug,2014-08-19T16:51:27.260,ns_1@10.242.238.90:<0.28201.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 357 [ns_server:info,2014-08-19T16:51:27.313,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 612 state to replica [ns_server:info,2014-08-19T16:51:27.319,ns_1@10.242.238.90:<0.28204.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 612 to state replica [views:debug,2014-08-19T16:51:27.336,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/361. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:27.337,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",361,replica,0} [rebalance:debug,2014-08-19T16:51:27.338,ns_1@10.242.238.90:<0.28201.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:27.338,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28201.0> (ok) [ns_server:debug,2014-08-19T16:51:27.348,ns_1@10.242.238.90:<0.28204.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_612_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.349,ns_1@10.242.238.90:<0.28204.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[612]}, {checkpoints,[{612,0}]}, {name,<<"replication_building_612_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[612]}, {takeover,false}, {suffix,"building_612_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",612,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:27.350,ns_1@10.242.238.90:<0.28204.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28205.0> [rebalance:debug,2014-08-19T16:51:27.350,ns_1@10.242.238.90:<0.28204.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:27.350,ns_1@10.242.238.90:<0.28204.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23223.1>,#Ref<16550.0.2.15835>}]} [rebalance:info,2014-08-19T16:51:27.350,ns_1@10.242.238.90:<0.28204.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 612 [rebalance:debug,2014-08-19T16:51:27.351,ns_1@10.242.238.90:<0.28204.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23223.1>,#Ref<16550.0.2.15835>}] [ns_server:debug,2014-08-19T16:51:27.351,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28206.0> (ok) [ns_server:debug,2014-08-19T16:51:27.351,ns_1@10.242.238.90:<0.28204.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:27.353,ns_1@10.242.238.90:<0.28207.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 612 [ns_server:info,2014-08-19T16:51:27.357,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 356 state to replica [ns_server:info,2014-08-19T16:51:27.361,ns_1@10.242.238.90:<0.28210.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 356 to state replica [ns_server:debug,2014-08-19T16:51:27.382,ns_1@10.242.238.90:<0.28210.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_356_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.384,ns_1@10.242.238.90:<0.28210.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[356]}, {checkpoints,[{356,0}]}, {name,<<"replication_building_356_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[356]}, {takeover,false}, {suffix,"building_356_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",356,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} 
[rebalance:debug,2014-08-19T16:51:27.384,ns_1@10.242.238.90:<0.28210.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28217.0> [rebalance:debug,2014-08-19T16:51:27.384,ns_1@10.242.238.90:<0.28210.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:27.385,ns_1@10.242.238.90:<0.28210.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23245.1>,#Ref<16550.0.2.15931>}]} [rebalance:info,2014-08-19T16:51:27.385,ns_1@10.242.238.90:<0.28210.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 356 [rebalance:debug,2014-08-19T16:51:27.385,ns_1@10.242.238.90:<0.28210.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23245.1>,#Ref<16550.0.2.15931>}] [ns_server:debug,2014-08-19T16:51:27.386,ns_1@10.242.238.90:<0.28210.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:27.397,ns_1@10.242.238.90:<0.28218.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 356 [ns_server:debug,2014-08-19T16:51:27.437,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 616. Nacking mccouch update. [views:debug,2014-08-19T16:51:27.437,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/616. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:27.437,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",616,pending,0} [ns_server:debug,2014-08-19T16:51:27.438,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,362,971,724,660,413,958,711,647,400,1022, 945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970,723,659, 412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982, 735,671,424,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007, 994,747,683,619,372,981,734,670,423,968,721,657,410,955,708,644,397,1019,942, 759,695,631,384,1006,993,746,682,618,371,980,733,669,422,967,720,656,409,954, 707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668, 421,966,719,655,408,953,706,642,395,1017,972,940,757,725,693,661,629,414,382, 1004,991,959,744,712,680,648,616,401,369,1023,978,946,763,731,699,667,635, 420,388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003, 990,743,679,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755, 691,627,380,1002,989,742,678,367,976,729,665,418,963,716,652,405,950,767,703, 639,392,1014,754,690,626,379,1001,988,741,677,366,975,728,664,417,962,715, 651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,365,974, 727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986, 739,675,364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998,751, 687,623,376,985,738,674,363] [ns_server:info,2014-08-19T16:51:27.446,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 611 state to replica [ns_server:info,2014-08-19T16:51:27.452,ns_1@10.242.238.90:<0.28236.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 611 to state replica [views:debug,2014-08-19T16:51:27.471,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for 
default/616. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:27.471,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",616,pending,0} [ns_server:debug,2014-08-19T16:51:27.484,ns_1@10.242.238.90:<0.28236.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_611_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.485,ns_1@10.242.238.90:<0.28236.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[611]}, {checkpoints,[{611,0}]}, {name,<<"replication_building_611_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[611]}, {takeover,false}, {suffix,"building_611_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",611,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:27.486,ns_1@10.242.238.90:<0.28236.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28237.0> [rebalance:debug,2014-08-19T16:51:27.486,ns_1@10.242.238.90:<0.28236.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:27.487,ns_1@10.242.238.90:<0.28236.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23286.1>,#Ref<16550.0.2.16118>}]} [rebalance:info,2014-08-19T16:51:27.487,ns_1@10.242.238.90:<0.28236.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 611 [rebalance:debug,2014-08-19T16:51:27.487,ns_1@10.242.238.90:<0.28236.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23286.1>,#Ref<16550.0.2.16118>}] [ns_server:debug,2014-08-19T16:51:27.488,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28238.0> (ok) [ns_server:debug,2014-08-19T16:51:27.488,ns_1@10.242.238.90:<0.28236.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:27.489,ns_1@10.242.238.90:<0.28239.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 611 [ns_server:info,2014-08-19T16:51:27.493,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 355 state to replica [ns_server:info,2014-08-19T16:51:27.497,ns_1@10.242.238.90:<0.28242.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 355 to state replica [ns_server:debug,2014-08-19T16:51:27.519,ns_1@10.242.238.90:<0.28242.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_355_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.520,ns_1@10.242.238.90:<0.28242.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[355]}, {checkpoints,[{355,0}]}, {name,<<"replication_building_355_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[355]}, {takeover,false}, {suffix,"building_355_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",355,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:27.520,ns_1@10.242.238.90:<0.28242.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28257.0> [rebalance:debug,2014-08-19T16:51:27.520,ns_1@10.242.238.90:<0.28242.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:51:27.521,ns_1@10.242.238.90:<0.28242.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23308.1>,#Ref<16550.0.2.16225>}]} [rebalance:info,2014-08-19T16:51:27.521,ns_1@10.242.238.90:<0.28242.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 355 [rebalance:debug,2014-08-19T16:51:27.521,ns_1@10.242.238.90:<0.28242.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23308.1>,#Ref<16550.0.2.16225>}] [ns_server:debug,2014-08-19T16:51:27.522,ns_1@10.242.238.90:<0.28242.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:27.533,ns_1@10.242.238.90:<0.28258.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 355 [ns_server:debug,2014-08-19T16:51:27.563,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 614. Nacking mccouch update. [views:debug,2014-08-19T16:51:27.563,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/614. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:27.563,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",614,pending,0} [ns_server:debug,2014-08-19T16:51:27.563,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,362,971,724,660,413,958,711,647,400,1022, 945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970,723,659, 412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982, 735,671,424,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007, 994,747,683,619,372,981,734,670,423,968,721,657,410,955,708,644,397,1019,942, 759,695,631,384,1006,993,746,682,618,371,980,733,669,422,967,720,656,409,954, 707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668, 421,966,719,655,408,953,706,642,395,1017,972,940,757,725,693,661,629,414,382, 1004,991,959,744,712,680,648,616,401,369,1023,978,946,763,731,699,667,635, 420,388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003, 990,743,679,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755, 691,627,380,1002,989,742,678,614,367,976,729,665,418,963,716,652,405,950,767, 703,639,392,1014,754,690,626,379,1001,988,741,677,366,975,728,664,417,962, 715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,365, 974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377, 986,739,675,364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998, 751,687,623,376,985,738,674,363] [ns_server:info,2014-08-19T16:51:27.582,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 610 state to replica [ns_server:info,2014-08-19T16:51:27.588,ns_1@10.242.238.90:<0.28261.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 610 to state replica [ns_server:debug,2014-08-19T16:51:27.616,ns_1@10.242.238.90:<0.28261.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_610_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.618,ns_1@10.242.238.90:<0.28261.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[610]}, {checkpoints,[{610,0}]}, {name,<<"replication_building_610_'ns_1@10.242.238.90'">>}, {takeover,false}] 
{{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[610]}, {takeover,false}, {suffix,"building_610_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",610,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:27.618,ns_1@10.242.238.90:<0.28261.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28262.0> [rebalance:debug,2014-08-19T16:51:27.619,ns_1@10.242.238.90:<0.28261.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:27.619,ns_1@10.242.238.90:<0.28261.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23349.1>,#Ref<16550.0.2.17586>}]} [rebalance:info,2014-08-19T16:51:27.619,ns_1@10.242.238.90:<0.28261.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 610 [rebalance:debug,2014-08-19T16:51:27.620,ns_1@10.242.238.90:<0.28261.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23349.1>,#Ref<16550.0.2.17586>}] [ns_server:debug,2014-08-19T16:51:27.620,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28263.0> (ok) [ns_server:debug,2014-08-19T16:51:27.620,ns_1@10.242.238.90:<0.28261.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:51:27.621,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/614. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:27.621,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",614,pending,0} [rebalance:debug,2014-08-19T16:51:27.622,ns_1@10.242.238.90:<0.28264.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 610 [ns_server:info,2014-08-19T16:51:27.626,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 354 state to replica [ns_server:info,2014-08-19T16:51:27.629,ns_1@10.242.238.90:<0.28267.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 354 to state replica [ns_server:debug,2014-08-19T16:51:27.650,ns_1@10.242.238.90:<0.28267.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_354_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.651,ns_1@10.242.238.90:<0.28267.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[354]}, {checkpoints,[{354,0}]}, {name,<<"replication_building_354_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[354]}, {takeover,false}, {suffix,"building_354_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",354,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:27.652,ns_1@10.242.238.90:<0.28267.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28268.0> [rebalance:debug,2014-08-19T16:51:27.652,ns_1@10.242.238.90:<0.28267.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:27.653,ns_1@10.242.238.90:<0.28267.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23371.1>,#Ref<16550.0.2.17694>}]} 
[rebalance:info,2014-08-19T16:51:27.653,ns_1@10.242.238.90:<0.28267.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 354 [rebalance:debug,2014-08-19T16:51:27.653,ns_1@10.242.238.90:<0.28267.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23371.1>,#Ref<16550.0.2.17694>}] [ns_server:debug,2014-08-19T16:51:27.654,ns_1@10.242.238.90:<0.28267.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:27.666,ns_1@10.242.238.90:<0.28269.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 354 [ns_server:info,2014-08-19T16:51:27.718,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 609 state to replica [ns_server:debug,2014-08-19T16:51:27.722,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 612. Nacking mccouch update. [views:debug,2014-08-19T16:51:27.722,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/612. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:27.722,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",612,pending,0} [ns_server:debug,2014-08-19T16:51:27.723,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,362,971,724,660,413,958,711,647,400,1022, 945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970,723,659, 412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982, 735,671,424,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007, 994,747,683,619,372,981,734,670,423,968,721,657,410,955,708,644,397,1019,942, 759,695,631,384,1006,993,746,682,618,371,980,733,669,422,967,720,656,409,954, 707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668, 421,966,719,655,408,953,706,642,395,1017,940,757,693,629,382,1004,991,959, 744,712,680,648,616,401,369,1023,978,946,763,731,699,667,635,420,388,1010, 965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003,990,743,679, 368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,418,963,716,652,405,950,767,703,639,392, 1014,754,690,626,379,1001,988,741,677,366,975,728,664,417,962,715,651,404, 949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727, 663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739, 675,364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998,751,687, 623,376,985,738,674,363,972,725,661,414] [ns_server:info,2014-08-19T16:51:27.724,ns_1@10.242.238.90:<0.28286.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 609 to state replica [ns_server:debug,2014-08-19T16:51:27.753,ns_1@10.242.238.90:<0.28286.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_609_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.755,ns_1@10.242.238.90:<0.28286.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[609]}, {checkpoints,[{609,0}]}, {name,<<"replication_building_609_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[609]}, {takeover,false}, {suffix,"building_609_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",609,'ns_1@10.242.238.88', 
'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:27.755,ns_1@10.242.238.90:<0.28286.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28288.0> [rebalance:debug,2014-08-19T16:51:27.756,ns_1@10.242.238.90:<0.28286.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:27.756,ns_1@10.242.238.90:<0.28286.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23412.1>,#Ref<16550.0.2.17895>}]} [rebalance:info,2014-08-19T16:51:27.756,ns_1@10.242.238.90:<0.28286.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 609 [rebalance:debug,2014-08-19T16:51:27.756,ns_1@10.242.238.90:<0.28286.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23412.1>,#Ref<16550.0.2.17895>}] [ns_server:debug,2014-08-19T16:51:27.757,ns_1@10.242.238.90:<0.28286.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:27.757,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28289.0> (ok) [rebalance:debug,2014-08-19T16:51:27.759,ns_1@10.242.238.90:<0.28290.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 609 [ns_server:info,2014-08-19T16:51:27.763,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 353 state to replica [ns_server:info,2014-08-19T16:51:27.767,ns_1@10.242.238.90:<0.28293.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 353 to state replica [views:debug,2014-08-19T16:51:27.772,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/612. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:27.772,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",612,pending,0} [ns_server:debug,2014-08-19T16:51:27.789,ns_1@10.242.238.90:<0.28293.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_353_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.790,ns_1@10.242.238.90:<0.28293.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[353]}, {checkpoints,[{353,0}]}, {name,<<"replication_building_353_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[353]}, {takeover,false}, {suffix,"building_353_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",353,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:27.791,ns_1@10.242.238.90:<0.28293.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28294.0> [rebalance:debug,2014-08-19T16:51:27.791,ns_1@10.242.238.90:<0.28293.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:27.792,ns_1@10.242.238.90:<0.28293.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23434.1>,#Ref<16550.0.2.17991>}]} [rebalance:info,2014-08-19T16:51:27.792,ns_1@10.242.238.90:<0.28293.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 353 [rebalance:debug,2014-08-19T16:51:27.792,ns_1@10.242.238.90:<0.28293.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23434.1>,#Ref<16550.0.2.17991>}] [ns_server:debug,2014-08-19T16:51:27.793,ns_1@10.242.238.90:<0.28293.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:27.803,ns_1@10.242.238.90:<0.28295.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 353 [ns_server:info,2014-08-19T16:51:27.852,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 608 state to replica [ns_server:info,2014-08-19T16:51:27.859,ns_1@10.242.238.90:<0.28312.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 608 to state replica [ns_server:debug,2014-08-19T16:51:27.887,ns_1@10.242.238.90:<0.28312.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_608_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.889,ns_1@10.242.238.90:<0.28312.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[608]}, {checkpoints,[{608,0}]}, {name,<<"replication_building_608_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[608]}, {takeover,false}, {suffix,"building_608_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",608,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:27.890,ns_1@10.242.238.90:<0.28312.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28313.0> [rebalance:debug,2014-08-19T16:51:27.890,ns_1@10.242.238.90:<0.28312.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:27.890,ns_1@10.242.238.90:<0.28312.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter 
{had_backfill,undefined,undefined,[{<16550.23475.1>,#Ref<16550.0.2.18208>}]} [rebalance:info,2014-08-19T16:51:27.890,ns_1@10.242.238.90:<0.28312.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 608 [rebalance:debug,2014-08-19T16:51:27.891,ns_1@10.242.238.90:<0.28312.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23475.1>,#Ref<16550.0.2.18208>}] [ns_server:debug,2014-08-19T16:51:27.891,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28314.0> (ok) [ns_server:debug,2014-08-19T16:51:27.892,ns_1@10.242.238.90:<0.28312.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:27.893,ns_1@10.242.238.90:<0.28315.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 608 [ns_server:info,2014-08-19T16:51:27.897,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 352 state to replica [ns_server:info,2014-08-19T16:51:27.901,ns_1@10.242.238.90:<0.28318.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 352 to state replica [ns_server:debug,2014-08-19T16:51:27.909,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 360. Nacking mccouch update. [views:debug,2014-08-19T16:51:27.909,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/360. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:27.909,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",360,replica,0} [ns_server:debug,2014-08-19T16:51:27.912,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,362,971,724,660,413,958,711,647,400,1022, 945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970,723,659, 412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982, 735,671,424,360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,423,968,721,657,410,955,708,644,397, 1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,967,720, 656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370, 979,732,668,421,966,719,655,408,953,706,642,395,1017,940,757,693,629,382, 1004,991,959,744,712,680,648,616,401,369,1023,978,946,763,731,699,667,635, 420,388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003, 990,743,679,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755, 691,627,380,1002,989,742,678,614,367,976,729,665,418,963,716,652,405,950,767, 703,639,392,1014,754,690,626,379,1001,988,741,677,366,975,728,664,417,962, 715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612, 365,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624, 377,986,739,675,364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011, 998,751,687,623,376,985,738,674,363,972,725,661,414] [ns_server:debug,2014-08-19T16:51:27.926,ns_1@10.242.238.90:<0.28318.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_352_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:27.928,ns_1@10.242.238.90:<0.28318.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[352]}, {checkpoints,[{352,0}]}, 
{name,<<"replication_building_352_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[352]}, {takeover,false}, {suffix,"building_352_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",352,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:27.928,ns_1@10.242.238.90:<0.28318.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28319.0> [rebalance:debug,2014-08-19T16:51:27.928,ns_1@10.242.238.90:<0.28318.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:27.929,ns_1@10.242.238.90:<0.28318.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23529.1>,#Ref<16550.0.2.18731>}]} [rebalance:info,2014-08-19T16:51:27.929,ns_1@10.242.238.90:<0.28318.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 352 [rebalance:debug,2014-08-19T16:51:27.929,ns_1@10.242.238.90:<0.28318.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23529.1>,#Ref<16550.0.2.18731>}] [ns_server:debug,2014-08-19T16:51:27.930,ns_1@10.242.238.90:<0.28318.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:27.937,ns_1@10.242.238.90:<0.28320.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 352 [views:debug,2014-08-19T16:51:27.985,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/360. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:27.985,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",360,replica,0} [ns_server:info,2014-08-19T16:51:27.986,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 607 state to replica [ns_server:info,2014-08-19T16:51:27.992,ns_1@10.242.238.90:<0.28323.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 607 to state replica [ns_server:debug,2014-08-19T16:51:28.020,ns_1@10.242.238.90:<0.28323.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_607_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.022,ns_1@10.242.238.90:<0.28323.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[607]}, {checkpoints,[{607,0}]}, {name,<<"replication_building_607_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[607]}, {takeover,false}, {suffix,"building_607_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",607,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:28.022,ns_1@10.242.238.90:<0.28323.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28324.0> [rebalance:debug,2014-08-19T16:51:28.022,ns_1@10.242.238.90:<0.28323.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:28.023,ns_1@10.242.238.90:<0.28323.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23570.1>,#Ref<16550.0.2.18938>}]} [rebalance:info,2014-08-19T16:51:28.023,ns_1@10.242.238.90:<0.28323.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 607 
[rebalance:debug,2014-08-19T16:51:28.023,ns_1@10.242.238.90:<0.28323.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23570.1>,#Ref<16550.0.2.18938>}] [ns_server:debug,2014-08-19T16:51:28.024,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28325.0> (ok) [ns_server:debug,2014-08-19T16:51:28.024,ns_1@10.242.238.90:<0.28323.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:28.025,ns_1@10.242.238.90:<0.28326.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 607 [ns_server:info,2014-08-19T16:51:28.030,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 351 state to replica [ns_server:info,2014-08-19T16:51:28.033,ns_1@10.242.238.90:<0.28329.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 351 to state replica [ns_server:debug,2014-08-19T16:51:28.055,ns_1@10.242.238.90:<0.28329.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_351_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.056,ns_1@10.242.238.90:<0.28329.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[351]}, {checkpoints,[{351,0}]}, {name,<<"replication_building_351_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[351]}, {takeover,false}, {suffix,"building_351_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",351,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:28.057,ns_1@10.242.238.90:<0.28329.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28344.0> [rebalance:debug,2014-08-19T16:51:28.057,ns_1@10.242.238.90:<0.28329.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:28.057,ns_1@10.242.238.90:<0.28329.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23592.1>,#Ref<16550.0.2.19043>}]} [rebalance:info,2014-08-19T16:51:28.057,ns_1@10.242.238.90:<0.28329.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 351 [rebalance:debug,2014-08-19T16:51:28.058,ns_1@10.242.238.90:<0.28329.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23592.1>,#Ref<16550.0.2.19043>}] [ns_server:debug,2014-08-19T16:51:28.058,ns_1@10.242.238.90:<0.28329.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:28.070,ns_1@10.242.238.90:<0.28345.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 351 [ns_server:info,2014-08-19T16:51:28.124,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 606 state to replica [ns_server:debug,2014-08-19T16:51:28.127,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 358. Nacking mccouch update. [views:debug,2014-08-19T16:51:28.127,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/358. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:28.127,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",358,replica,0} [ns_server:debug,2014-08-19T16:51:28.128,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,362,971,724,660,413,958,711,647,400,1022, 945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970,723,659, 412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982, 735,671,424,360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,423,968,721,657,410,955,708,644,397, 1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358,967, 720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617, 370,979,732,668,421,966,719,655,408,953,706,642,395,1017,940,757,693,629,382, 1004,991,959,744,712,680,648,616,401,369,1023,978,946,763,731,699,667,635, 420,388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003, 990,743,679,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755, 691,627,380,1002,989,742,678,614,367,976,729,665,418,963,716,652,405,950,767, 703,639,392,1014,754,690,626,379,1001,988,741,677,366,975,728,664,417,962, 715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612, 365,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624, 377,986,739,675,364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011, 998,751,687,623,376,985,738,674,363,972,725,661,414] [ns_server:info,2014-08-19T16:51:28.130,ns_1@10.242.238.90:<0.28348.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 606 to state replica [ns_server:debug,2014-08-19T16:51:28.159,ns_1@10.242.238.90:<0.28348.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_606_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.160,ns_1@10.242.238.90:<0.28348.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[606]}, {checkpoints,[{606,0}]}, {name,<<"replication_building_606_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[606]}, {takeover,false}, {suffix,"building_606_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",606,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:28.161,ns_1@10.242.238.90:<0.28348.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28349.0> [rebalance:debug,2014-08-19T16:51:28.161,ns_1@10.242.238.90:<0.28348.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:28.161,ns_1@10.242.238.90:<0.28348.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23633.1>,#Ref<16550.0.2.19265>}]} [rebalance:info,2014-08-19T16:51:28.161,ns_1@10.242.238.90:<0.28348.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 606 [rebalance:debug,2014-08-19T16:51:28.162,ns_1@10.242.238.90:<0.28348.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23633.1>,#Ref<16550.0.2.19265>}] [ns_server:debug,2014-08-19T16:51:28.162,ns_1@10.242.238.90:<0.28348.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:51:28.163,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28350.0> (ok) [rebalance:debug,2014-08-19T16:51:28.166,ns_1@10.242.238.90:<0.28351.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 606 [ns_server:info,2014-08-19T16:51:28.170,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 350 state to replica [ns_server:info,2014-08-19T16:51:28.175,ns_1@10.242.238.90:<0.28354.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 350 to state replica [ns_server:debug,2014-08-19T16:51:28.195,ns_1@10.242.238.90:<0.28354.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_350_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.197,ns_1@10.242.238.90:<0.28354.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[350]}, {checkpoints,[{350,0}]}, {name,<<"replication_building_350_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[350]}, {takeover,false}, {suffix,"building_350_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",350,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:28.197,ns_1@10.242.238.90:<0.28354.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28355.0> [rebalance:debug,2014-08-19T16:51:28.198,ns_1@10.242.238.90:<0.28354.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:28.198,ns_1@10.242.238.90:<0.28354.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23655.1>,#Ref<16550.0.2.19380>}]} [rebalance:info,2014-08-19T16:51:28.198,ns_1@10.242.238.90:<0.28354.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 350 [rebalance:debug,2014-08-19T16:51:28.199,ns_1@10.242.238.90:<0.28354.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23655.1>,#Ref<16550.0.2.19380>}] [ns_server:debug,2014-08-19T16:51:28.200,ns_1@10.242.238.90:<0.28354.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:28.211,ns_1@10.242.238.90:<0.28356.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 350 [views:debug,2014-08-19T16:51:28.211,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/358. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:28.211,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",358,replica,0} [ns_server:info,2014-08-19T16:51:28.259,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 605 state to replica [ns_server:info,2014-08-19T16:51:28.265,ns_1@10.242.238.90:<0.28373.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 605 to state replica [ns_server:debug,2014-08-19T16:51:28.293,ns_1@10.242.238.90:<0.28373.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_605_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.295,ns_1@10.242.238.90:<0.28373.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[605]}, {checkpoints,[{605,0}]}, {name,<<"replication_building_605_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[605]}, {takeover,false}, {suffix,"building_605_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",605,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:28.295,ns_1@10.242.238.90:<0.28373.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28374.0> [rebalance:debug,2014-08-19T16:51:28.295,ns_1@10.242.238.90:<0.28373.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:28.296,ns_1@10.242.238.90:<0.28373.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23696.1>,#Ref<16550.0.2.19587>}]} [rebalance:info,2014-08-19T16:51:28.296,ns_1@10.242.238.90:<0.28373.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 605 [rebalance:debug,2014-08-19T16:51:28.296,ns_1@10.242.238.90:<0.28373.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23696.1>,#Ref<16550.0.2.19587>}] [ns_server:debug,2014-08-19T16:51:28.297,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28375.0> (ok) [ns_server:debug,2014-08-19T16:51:28.297,ns_1@10.242.238.90:<0.28373.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:28.299,ns_1@10.242.238.90:<0.28376.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 605 [ns_server:debug,2014-08-19T16:51:28.303,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 615. Nacking mccouch update. [views:debug,2014-08-19T16:51:28.303,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/615. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:28.303,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",615,pending,0} [ns_server:info,2014-08-19T16:51:28.303,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 349 state to replica [ns_server:debug,2014-08-19T16:51:28.304,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,362,971,724,660,413,958,711,647,400,1022, 945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970,723,659, 412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982, 735,671,424,360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,423,968,721,657,410,955,708,644,397, 1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358,967, 720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617, 370,979,732,668,421,966,719,655,408,953,706,642,395,1017,940,757,693,629,382, 1004,991,959,744,712,680,648,616,401,369,1023,978,946,763,731,699,667,635, 420,388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003, 990,743,679,615,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938, 755,691,627,380,1002,989,742,678,614,367,976,729,665,418,963,716,652,405,950, 767,703,639,392,1014,754,690,626,379,1001,988,741,677,366,975,728,664,417, 962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676, 612,365,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688, 624,377,986,739,675,364,973,726,662,415,960,713,649,402,947,764,700,636,389, 1011,998,751,687,623,376,985,738,674,363,972,725,661,414] [ns_server:info,2014-08-19T16:51:28.308,ns_1@10.242.238.90:<0.28379.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 349 to state replica [ns_server:debug,2014-08-19T16:51:28.333,ns_1@10.242.238.90:<0.28379.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_349_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.334,ns_1@10.242.238.90:<0.28379.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[349]}, {checkpoints,[{349,0}]}, {name,<<"replication_building_349_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[349]}, {takeover,false}, {suffix,"building_349_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",349,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:28.335,ns_1@10.242.238.90:<0.28379.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28380.0> [rebalance:debug,2014-08-19T16:51:28.335,ns_1@10.242.238.90:<0.28379.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:28.336,ns_1@10.242.238.90:<0.28379.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23718.1>,#Ref<16550.0.2.19693>}]} [rebalance:info,2014-08-19T16:51:28.336,ns_1@10.242.238.90:<0.28379.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 349 [rebalance:debug,2014-08-19T16:51:28.337,ns_1@10.242.238.90:<0.28379.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23718.1>,#Ref<16550.0.2.19693>}] 
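Reading aid for the entries above: each vbucket move appears to follow the same recorded sequence, ns_memcached changes the vbucket state to replica, any stale replication_building_<vb> tap is killed, ebucketmigrator_srv starts a tap stream from {"10.242.238.88",11209} to {"10.242.238.90",11209} with the proplist shown (vbuckets, checkpoints, takeover, set_to_pending_state), and a janitor_agent call then waits for checkpoint 1 to persist. The sketch below is illustrative only, not Couchbase code; parse_tap_entry and the regexes are my own and assume nothing beyond the text layout visible in this excerpt.

import re

# Illustrative sketch only (not Couchbase source): pull the vbucket id and
# the set_to_pending_state flag out of one "Starting tap stream" entry of
# the kind logged above.
SAMPLE = (
    'Starting tap stream: [{vbuckets,[349]}, {checkpoints,[{349,0}]}, '
    '{takeover,false}] ... {password,get_from_config}, '
    '{set_to_pending_state,false}]}'
)

def parse_tap_entry(entry):
    vbucket = int(re.search(r'\{vbuckets,\[(\d+)\]\}', entry).group(1))
    pending = re.search(r'\{set_to_pending_state,(true|false)\}', entry).group(1) == 'true'
    return vbucket, pending

print(parse_tap_entry(SAMPLE))  # -> (349, False)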
[ns_server:debug,2014-08-19T16:51:28.338,ns_1@10.242.238.90:<0.28379.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:28.344,ns_1@10.242.238.90:<0.28389.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 349 [views:debug,2014-08-19T16:51:28.362,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/615. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:28.362,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",615,pending,0} [ns_server:info,2014-08-19T16:51:28.396,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 604 state to replica [ns_server:info,2014-08-19T16:51:28.403,ns_1@10.242.238.90:<0.28392.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 604 to state replica [ns_server:debug,2014-08-19T16:51:28.441,ns_1@10.242.238.90:<0.28392.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_604_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.442,ns_1@10.242.238.90:<0.28392.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[604]}, {checkpoints,[{604,0}]}, {name,<<"replication_building_604_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[604]}, {takeover,false}, {suffix,"building_604_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",604,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:28.443,ns_1@10.242.238.90:<0.28392.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28409.0> [rebalance:debug,2014-08-19T16:51:28.443,ns_1@10.242.238.90:<0.28392.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:28.443,ns_1@10.242.238.90:<0.28392.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23759.1>,#Ref<16550.0.2.19918>}]} [rebalance:info,2014-08-19T16:51:28.443,ns_1@10.242.238.90:<0.28392.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 604 [rebalance:debug,2014-08-19T16:51:28.444,ns_1@10.242.238.90:<0.28392.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23759.1>,#Ref<16550.0.2.19918>}] [ns_server:debug,2014-08-19T16:51:28.444,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28410.0> (ok) [ns_server:debug,2014-08-19T16:51:28.445,ns_1@10.242.238.90:<0.28392.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:28.446,ns_1@10.242.238.90:<0.28411.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 604 [ns_server:info,2014-08-19T16:51:28.450,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 348 state to replica [ns_server:info,2014-08-19T16:51:28.456,ns_1@10.242.238.90:<0.28414.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 348 to state replica [ns_server:debug,2014-08-19T16:51:28.477,ns_1@10.242.238.90:<0.28414.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_348_'ns_1@10.242.238.90' 
[rebalance:info,2014-08-19T16:51:28.478,ns_1@10.242.238.90:<0.28414.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[348]}, {checkpoints,[{348,0}]}, {name,<<"replication_building_348_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[348]}, {takeover,false}, {suffix,"building_348_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",348,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:28.479,ns_1@10.242.238.90:<0.28414.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28415.0> [rebalance:debug,2014-08-19T16:51:28.479,ns_1@10.242.238.90:<0.28414.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:28.480,ns_1@10.242.238.90:<0.28414.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23781.1>,#Ref<16550.0.2.20013>}]} [rebalance:info,2014-08-19T16:51:28.480,ns_1@10.242.238.90:<0.28414.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 348 [rebalance:debug,2014-08-19T16:51:28.480,ns_1@10.242.238.90:<0.28414.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23781.1>,#Ref<16550.0.2.20013>}] [ns_server:debug,2014-08-19T16:51:28.481,ns_1@10.242.238.90:<0.28414.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:28.492,ns_1@10.242.238.90:<0.28416.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 348 [ns_server:debug,2014-08-19T16:51:28.504,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 613. Nacking mccouch update. [views:debug,2014-08-19T16:51:28.504,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/613. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:28.504,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",613,pending,0} [ns_server:debug,2014-08-19T16:51:28.506,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,362,971,724,660,413,958,711,647,400,1022, 945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970,723,659, 412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982, 735,671,424,360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,423,968,721,657,410,955,708,644,397, 1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358,967, 720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617, 370,979,732,668,421,966,719,655,408,953,706,642,395,1017,940,757,693,629,382, 1004,991,959,744,712,680,648,616,401,369,1023,978,946,763,731,699,667,635, 420,388,1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003, 990,743,679,615,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938, 755,691,627,380,1002,989,742,678,614,367,976,729,665,418,963,716,652,405,950, 767,703,639,392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664, 417,962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740, 676,612,365,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752, 688,624,377,986,739,675,364,973,726,662,415,960,713,649,402,947,764,700,636, 389,1011,998,751,687,623,376,985,738,674,363,972,725,661,414] [ns_server:info,2014-08-19T16:51:28.543,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 603 state to replica [ns_server:info,2014-08-19T16:51:28.553,ns_1@10.242.238.90:<0.28419.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 603 to state replica [views:debug,2014-08-19T16:51:28.576,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/613. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:28.576,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",613,pending,0} [ns_server:debug,2014-08-19T16:51:28.584,ns_1@10.242.238.90:<0.28419.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_603_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.586,ns_1@10.242.238.90:<0.28419.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[603]}, {checkpoints,[{603,0}]}, {name,<<"replication_building_603_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[603]}, {takeover,false}, {suffix,"building_603_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",603,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:28.586,ns_1@10.242.238.90:<0.28419.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28420.0> [rebalance:debug,2014-08-19T16:51:28.587,ns_1@10.242.238.90:<0.28419.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:28.587,ns_1@10.242.238.90:<0.28419.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23822.1>,#Ref<16550.0.2.20213>}]} [rebalance:info,2014-08-19T16:51:28.587,ns_1@10.242.238.90:<0.28419.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 603 [rebalance:debug,2014-08-19T16:51:28.588,ns_1@10.242.238.90:<0.28419.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23822.1>,#Ref<16550.0.2.20213>}] [ns_server:debug,2014-08-19T16:51:28.588,ns_1@10.242.238.90:<0.28419.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:28.589,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28421.0> (ok) [rebalance:debug,2014-08-19T16:51:28.590,ns_1@10.242.238.90:<0.28422.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 603 [ns_server:info,2014-08-19T16:51:28.594,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 347 state to replica [ns_server:info,2014-08-19T16:51:28.598,ns_1@10.242.238.90:<0.28425.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 347 to state replica [ns_server:debug,2014-08-19T16:51:28.619,ns_1@10.242.238.90:<0.28425.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_347_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.620,ns_1@10.242.238.90:<0.28425.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[347]}, {checkpoints,[{347,0}]}, {name,<<"replication_building_347_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[347]}, {takeover,false}, {suffix,"building_347_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",347,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:28.621,ns_1@10.242.238.90:<0.28425.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28440.0> [rebalance:debug,2014-08-19T16:51:28.621,ns_1@10.242.238.90:<0.28425.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
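Interleaved with the tap work, mc_couch_events keeps delivering set_vbucket notifications to capi_set_view_manager, and mc_connection echoes each one as a tuple of the form {set_vbucket,"default",VBucket,State,Checkpoint}; in the entries above 613 and 615 are flagged pending while 358 went to replica earlier. A quick way to summarise a chunk of this log, sketched below under the assumption that the tuples keep exactly this textual form (tally_states is my own helper, not a Couchbase API), is to tally notifications by target state.

import re
from collections import Counter

# Rough helper based only on the log text shown above.
SET_VBUCKET = re.compile(
    r'\{set_vbucket,"default",(\d+),(replica|pending|active|dead),(\d+)\}')

def tally_states(log_text):
    return Counter(state for _vb, state, _ckpt in SET_VBUCKET.findall(log_text))

sample = ('{set_vbucket,"default",613,pending,0} '
          '{set_vbucket,"default",358,replica,0}')
print(tally_states(sample))  # Counter({'pending': 1, 'replica': 1})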
[rebalance:debug,2014-08-19T16:51:28.622,ns_1@10.242.238.90:<0.28425.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23844.1>,#Ref<16550.0.2.20310>}]} [rebalance:info,2014-08-19T16:51:28.622,ns_1@10.242.238.90:<0.28425.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 347 [rebalance:debug,2014-08-19T16:51:28.622,ns_1@10.242.238.90:<0.28425.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23844.1>,#Ref<16550.0.2.20310>}] [ns_server:debug,2014-08-19T16:51:28.623,ns_1@10.242.238.90:<0.28425.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:28.635,ns_1@10.242.238.90:<0.28441.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 347 [ns_server:debug,2014-08-19T16:51:28.664,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 611. Nacking mccouch update. [views:debug,2014-08-19T16:51:28.664,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/611. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:28.664,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",611,pending,0} [ns_server:debug,2014-08-19T16:51:28.665,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,426,362,971,724,660,413,958,711,647,400,1022, 945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970,723,659, 412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982, 735,671,424,360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,423,968,721,657,410,955,708,644,397, 1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358,967, 720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617, 370,979,732,668,421,966,719,655,408,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,946,763,731,699,667,635,420,388,1010,965,718, 654,407,952,705,641,394,1016,939,756,692,628,381,1003,990,743,679,615,368, 977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,418,963,716,652,405,950,767,703,639,392, 1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,417,962,715,651, 404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974, 727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986, 739,675,611,364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998, 751,687,623,376,985,738,674,363,972,725,661,414,959,712,648,401,1023] [ns_server:info,2014-08-19T16:51:28.683,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 602 state to replica [ns_server:info,2014-08-19T16:51:28.690,ns_1@10.242.238.90:<0.28444.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 602 to state replica [views:debug,2014-08-19T16:51:28.698,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/611. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:28.698,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",611,pending,0} [ns_server:debug,2014-08-19T16:51:28.720,ns_1@10.242.238.90:<0.28444.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_602_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.722,ns_1@10.242.238.90:<0.28444.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[602]}, {checkpoints,[{602,0}]}, {name,<<"replication_building_602_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[602]}, {takeover,false}, {suffix,"building_602_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",602,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:28.722,ns_1@10.242.238.90:<0.28444.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28445.0> [rebalance:debug,2014-08-19T16:51:28.722,ns_1@10.242.238.90:<0.28444.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:28.723,ns_1@10.242.238.90:<0.28444.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23885.1>,#Ref<16550.0.2.20507>}]} [rebalance:info,2014-08-19T16:51:28.723,ns_1@10.242.238.90:<0.28444.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 602 [rebalance:debug,2014-08-19T16:51:28.724,ns_1@10.242.238.90:<0.28444.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23885.1>,#Ref<16550.0.2.20507>}] [ns_server:debug,2014-08-19T16:51:28.724,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28446.0> (ok) [ns_server:debug,2014-08-19T16:51:28.725,ns_1@10.242.238.90:<0.28444.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:28.726,ns_1@10.242.238.90:<0.28447.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 602 [ns_server:info,2014-08-19T16:51:28.730,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 346 state to replica [ns_server:info,2014-08-19T16:51:28.734,ns_1@10.242.238.90:<0.28450.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 346 to state replica [ns_server:debug,2014-08-19T16:51:28.754,ns_1@10.242.238.90:<0.28450.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_346_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.756,ns_1@10.242.238.90:<0.28450.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[346]}, {checkpoints,[{346,0}]}, {name,<<"replication_building_346_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[346]}, {takeover,false}, {suffix,"building_346_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",346,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:28.756,ns_1@10.242.238.90:<0.28450.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28465.0> [rebalance:debug,2014-08-19T16:51:28.756,ns_1@10.242.238.90:<0.28450.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
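Each time another vbucket becomes usable, capi_set_view_manager logs the complete, unsorted "Usable vbuckets:" set again, so consecutive snapshots differ only by the vbuckets just added: 611 is present in the latest list above but absent from the list logged when 613 became usable a moment earlier. Comparing two snapshots is easier with a small diff; the helper below is purely a reading aid of mine (parse_snapshot is not part of any Couchbase tooling) and assumes the snapshots are passed in as the raw comma-separated number text.

# Reading aid only: diff two consecutive "Usable vbuckets:" snapshots.
def parse_snapshot(text):
    return {int(tok) for tok in text.replace('\n', ',').split(',') if tok.strip()}

older = "997,750,686,622"        # stand-in for the earlier snapshot text
newer = "997,750,686,622,611"    # stand-in for the later snapshot text
print(sorted(parse_snapshot(newer) - parse_snapshot(older)))  # [611]
print(sorted(parse_snapshot(older) - parse_snapshot(newer)))  # []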
[rebalance:debug,2014-08-19T16:51:28.757,ns_1@10.242.238.90:<0.28450.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23907.1>,#Ref<16550.0.2.20613>}]} [rebalance:info,2014-08-19T16:51:28.757,ns_1@10.242.238.90:<0.28450.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 346 [rebalance:debug,2014-08-19T16:51:28.757,ns_1@10.242.238.90:<0.28450.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23907.1>,#Ref<16550.0.2.20613>}] [ns_server:debug,2014-08-19T16:51:28.758,ns_1@10.242.238.90:<0.28450.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:28.771,ns_1@10.242.238.90:<0.28466.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 346 [ns_server:debug,2014-08-19T16:51:28.781,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 609. Nacking mccouch update. [views:debug,2014-08-19T16:51:28.782,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/609. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:28.782,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",609,pending,0} [ns_server:debug,2014-08-19T16:51:28.782,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,609,426,362,971,724,660,413,958,711,647,400, 1022,945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970, 723,659,412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620, 373,982,735,671,424,360,969,722,658,411,956,709,645,398,1020,943,760,696,632, 385,1007,994,747,683,619,372,981,734,670,423,968,721,657,410,955,708,644,397, 1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358,967, 720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617, 370,979,732,668,421,966,719,655,408,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,946,763,731,699,667,635,420,388,1010,965,718, 654,407,952,705,641,394,1016,939,756,692,628,381,1003,990,743,679,615,368, 977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,418,963,716,652,405,950,767,703,639,392, 1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,417,962,715,651, 404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974, 727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986, 739,675,611,364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998, 751,687,623,376,985,738,674,363,972,725,661,414,959,712,648,401,1023] [views:debug,2014-08-19T16:51:28.815,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/609. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:28.815,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",609,pending,0} [ns_server:info,2014-08-19T16:51:28.819,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 601 state to replica [ns_server:info,2014-08-19T16:51:28.826,ns_1@10.242.238.90:<0.28469.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 601 to state replica [ns_server:debug,2014-08-19T16:51:28.854,ns_1@10.242.238.90:<0.28469.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_601_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.855,ns_1@10.242.238.90:<0.28469.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[601]}, {checkpoints,[{601,0}]}, {name,<<"replication_building_601_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[601]}, {takeover,false}, {suffix,"building_601_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",601,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:28.856,ns_1@10.242.238.90:<0.28469.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28470.0> [rebalance:debug,2014-08-19T16:51:28.856,ns_1@10.242.238.90:<0.28469.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:28.857,ns_1@10.242.238.90:<0.28469.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23948.1>,#Ref<16550.0.2.20820>}]} [rebalance:info,2014-08-19T16:51:28.857,ns_1@10.242.238.90:<0.28469.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 601 [rebalance:debug,2014-08-19T16:51:28.857,ns_1@10.242.238.90:<0.28469.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23948.1>,#Ref<16550.0.2.20820>}] [ns_server:debug,2014-08-19T16:51:28.858,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28471.0> (ok) [ns_server:debug,2014-08-19T16:51:28.858,ns_1@10.242.238.90:<0.28469.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:28.859,ns_1@10.242.238.90:<0.28472.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 601 [ns_server:info,2014-08-19T16:51:28.863,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 345 state to replica [ns_server:info,2014-08-19T16:51:28.867,ns_1@10.242.238.90:<0.28475.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 345 to state replica [ns_server:debug,2014-08-19T16:51:28.888,ns_1@10.242.238.90:<0.28475.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_345_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.890,ns_1@10.242.238.90:<0.28475.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[345]}, {checkpoints,[{345,0}]}, {name,<<"replication_building_345_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[345]}, {takeover,false}, {suffix,"building_345_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",345,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, 
{set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:28.890,ns_1@10.242.238.90:<0.28475.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28490.0> [rebalance:debug,2014-08-19T16:51:28.891,ns_1@10.242.238.90:<0.28475.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:28.891,ns_1@10.242.238.90:<0.28475.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.23970.1>,#Ref<16550.0.2.20917>}]} [rebalance:info,2014-08-19T16:51:28.891,ns_1@10.242.238.90:<0.28475.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 345 [rebalance:debug,2014-08-19T16:51:28.892,ns_1@10.242.238.90:<0.28475.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.23970.1>,#Ref<16550.0.2.20917>}] [ns_server:debug,2014-08-19T16:51:28.893,ns_1@10.242.238.90:<0.28475.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:28.905,ns_1@10.242.238.90:<0.28491.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 345 [ns_server:debug,2014-08-19T16:51:28.949,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 607. Nacking mccouch update. [views:debug,2014-08-19T16:51:28.949,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/607. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:28.949,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",607,pending,0} [ns_server:debug,2014-08-19T16:51:28.950,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,609,426,362,971,724,660,413,958,711,647,400, 1022,945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970, 723,659,412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620, 373,982,735,671,607,424,360,969,722,658,411,956,709,645,398,1020,943,760,696, 632,385,1007,994,747,683,619,372,981,734,670,423,968,721,657,410,955,708,644, 397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358, 967,720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681, 617,370,979,732,668,421,966,719,655,408,953,706,642,395,1017,940,757,693,629, 382,1004,991,744,680,616,369,978,946,763,731,699,667,635,420,388,1010,965, 718,654,407,952,705,641,394,1016,939,756,692,628,381,1003,990,743,679,615, 368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,418,963,716,652,405,950,767,703,639,392, 1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,417,962,715,651, 404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974, 727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986, 739,675,611,364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998, 751,687,623,376,985,738,674,363,972,725,661,414,959,712,648,401,1023] [ns_server:info,2014-08-19T16:51:28.954,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 600 state to replica [ns_server:info,2014-08-19T16:51:28.961,ns_1@10.242.238.90:<0.28494.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 600 to state replica 
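The "Changed vbucket N state to replica" lines from ns_memcached give a convenient timeline of how quickly the moves are progressing (601 at 16:51:28.819 and 600 at 16:51:28.954 just above). Assuming the bracketed header keeps exactly the shape shown in this excerpt, the timestamps can be paired with vbucket ids as sketched here; replica_changes and the regex are my own illustration, not part of any Couchbase tool.

import re
from datetime import datetime

# Sketch under the stated format assumption only.
CHANGED = re.compile(
    r'\[ns_server:info,(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}),\S+?\]'
    r'Changed vbucket (\d+) state to replica')

def replica_changes(log_text):
    return [(datetime.strptime(ts, '%Y-%m-%dT%H:%M:%S.%f'), int(vb))
            for ts, vb in CHANGED.findall(log_text)]

sample = ('[ns_server:info,2014-08-19T16:51:28.954,ns_1@10.242.238.90:'
          '<0.18785.0>:ns_memcached:do_handle_call:527]'
          'Changed vbucket 600 state to replica')
print(replica_changes(sample))
# -> [(datetime.datetime(2014, 8, 19, 16, 51, 28, 954000), 600)]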
[views:debug,2014-08-19T16:51:28.983,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/607. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:28.983,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",607,pending,0} [ns_server:debug,2014-08-19T16:51:28.989,ns_1@10.242.238.90:<0.28494.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_600_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:28.990,ns_1@10.242.238.90:<0.28494.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[600]}, {checkpoints,[{600,0}]}, {name,<<"replication_building_600_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[600]}, {takeover,false}, {suffix,"building_600_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",600,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:28.991,ns_1@10.242.238.90:<0.28494.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28495.0> [rebalance:debug,2014-08-19T16:51:28.991,ns_1@10.242.238.90:<0.28494.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:28.992,ns_1@10.242.238.90:<0.28494.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.24011.1>,#Ref<16550.0.2.21124>}]} [rebalance:info,2014-08-19T16:51:28.992,ns_1@10.242.238.90:<0.28494.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 600 [rebalance:debug,2014-08-19T16:51:28.992,ns_1@10.242.238.90:<0.28494.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.24011.1>,#Ref<16550.0.2.21124>}] [ns_server:debug,2014-08-19T16:51:28.993,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28496.0> (ok) [ns_server:debug,2014-08-19T16:51:28.993,ns_1@10.242.238.90:<0.28494.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:28.994,ns_1@10.242.238.90:<0.28497.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 600 [ns_server:info,2014-08-19T16:51:28.999,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 344 state to replica [ns_server:info,2014-08-19T16:51:29.002,ns_1@10.242.238.90:<0.28500.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 344 to state replica [ns_server:debug,2014-08-19T16:51:29.023,ns_1@10.242.238.90:<0.28500.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_344_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:29.025,ns_1@10.242.238.90:<0.28500.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[344]}, {checkpoints,[{344,0}]}, {name,<<"replication_building_344_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[344]}, {takeover,false}, {suffix,"building_344_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",344,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:29.025,ns_1@10.242.238.90:<0.28500.0>:ebucketmigrator_srv:init:640]upstream_sender pid: 
<0.28501.0> [rebalance:debug,2014-08-19T16:51:29.026,ns_1@10.242.238.90:<0.28500.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:29.026,ns_1@10.242.238.90:<0.28500.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.24033.1>,#Ref<16550.0.2.21219>}]} [rebalance:info,2014-08-19T16:51:29.026,ns_1@10.242.238.90:<0.28500.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 344 [rebalance:debug,2014-08-19T16:51:29.027,ns_1@10.242.238.90:<0.28500.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.24033.1>,#Ref<16550.0.2.21219>}] [ns_server:debug,2014-08-19T16:51:29.028,ns_1@10.242.238.90:<0.28500.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:29.039,ns_1@10.242.238.90:<0.28516.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 344 [ns_server:debug,2014-08-19T16:51:29.074,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 359. Nacking mccouch update. [views:debug,2014-08-19T16:51:29.075,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/359. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:29.075,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",359,replica,0} [ns_server:debug,2014-08-19T16:51:29.075,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,609,426,362,971,724,660,413,958,711,647,400, 1022,945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970, 723,659,412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620, 373,982,735,671,607,424,360,969,722,658,411,956,709,645,398,1020,943,760,696, 632,385,1007,994,747,683,619,372,981,734,670,423,359,968,721,657,410,955,708, 644,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422, 358,967,720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745, 681,617,370,979,732,668,421,966,719,655,408,953,706,642,395,1017,940,757,693, 629,382,1004,991,744,680,616,369,978,946,763,731,699,667,635,420,388,1010, 965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003,990,743,679, 615,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755,691,627, 380,1002,989,742,678,614,367,976,729,665,418,963,716,652,405,950,767,703,639, 392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,417,962,715, 651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365, 974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377, 986,739,675,611,364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011, 998,751,687,623,376,985,738,674,363,972,725,661,414,959,712,648,401,1023] [ns_server:info,2014-08-19T16:51:29.089,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 599 state to replica [ns_server:info,2014-08-19T16:51:29.095,ns_1@10.242.238.90:<0.28519.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 599 to state replica [views:debug,2014-08-19T16:51:29.108,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/359. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:29.109,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",359,replica,0} [ns_server:debug,2014-08-19T16:51:29.125,ns_1@10.242.238.90:<0.28519.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_599_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:29.127,ns_1@10.242.238.90:<0.28519.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[599]}, {checkpoints,[{599,0}]}, {name,<<"replication_building_599_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[599]}, {takeover,false}, {suffix,"building_599_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",599,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:29.127,ns_1@10.242.238.90:<0.28519.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28520.0> [rebalance:debug,2014-08-19T16:51:29.127,ns_1@10.242.238.90:<0.28519.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:29.128,ns_1@10.242.238.90:<0.28519.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.24079.1>,#Ref<16550.0.2.21438>}]} [rebalance:info,2014-08-19T16:51:29.128,ns_1@10.242.238.90:<0.28519.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 599 [rebalance:debug,2014-08-19T16:51:29.128,ns_1@10.242.238.90:<0.28519.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.24079.1>,#Ref<16550.0.2.21438>}] [ns_server:debug,2014-08-19T16:51:29.129,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28521.0> (ok) [ns_server:debug,2014-08-19T16:51:29.130,ns_1@10.242.238.90:<0.28519.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:29.131,ns_1@10.242.238.90:<0.28522.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 599 [ns_server:info,2014-08-19T16:51:29.135,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 343 state to replica [ns_server:info,2014-08-19T16:51:29.139,ns_1@10.242.238.90:<0.28525.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 343 to state replica [ns_server:debug,2014-08-19T16:51:29.159,ns_1@10.242.238.90:<0.28525.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_343_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:29.161,ns_1@10.242.238.90:<0.28525.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[343]}, {checkpoints,[{343,0}]}, {name,<<"replication_building_343_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[343]}, {takeover,false}, {suffix,"building_343_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",343,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:29.161,ns_1@10.242.238.90:<0.28525.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28532.0> [rebalance:debug,2014-08-19T16:51:29.161,ns_1@10.242.238.90:<0.28525.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:51:29.162,ns_1@10.242.238.90:<0.28525.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.24101.1>,#Ref<16550.0.2.21555>}]} [rebalance:info,2014-08-19T16:51:29.162,ns_1@10.242.238.90:<0.28525.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 343 [rebalance:debug,2014-08-19T16:51:29.162,ns_1@10.242.238.90:<0.28525.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.24101.1>,#Ref<16550.0.2.21555>}] [ns_server:debug,2014-08-19T16:51:29.163,ns_1@10.242.238.90:<0.28525.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:29.175,ns_1@10.242.238.90:<0.28541.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 343 [ns_server:debug,2014-08-19T16:51:29.217,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 357. Nacking mccouch update. [views:debug,2014-08-19T16:51:29.217,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/357. Updated state: replica (1) [ns_server:debug,2014-08-19T16:51:29.217,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",357,replica,1} [ns_server:debug,2014-08-19T16:51:29.218,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,609,426,362,971,724,660,413,958,711,647,400, 1022,945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970, 723,659,412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620, 373,982,735,671,607,424,360,969,722,658,411,956,709,645,398,1020,943,760,696, 632,385,1007,994,747,683,619,372,981,734,670,423,359,968,721,657,410,955,708, 644,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422, 358,967,720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745, 681,617,370,979,732,668,421,357,966,719,655,408,953,706,642,395,1017,940,757, 693,629,382,1004,991,744,680,616,369,978,946,763,731,699,667,635,420,388, 1010,965,718,654,407,952,705,641,394,1016,939,756,692,628,381,1003,990,743, 679,615,368,977,730,666,419,964,717,653,406,951,704,640,393,1015,938,755,691, 627,380,1002,989,742,678,614,367,976,729,665,418,963,716,652,405,950,767,703, 639,392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,417,962, 715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612, 365,974,727,663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624, 377,986,739,675,611,364,973,726,662,415,960,713,649,402,947,764,700,636,389, 1011,998,751,687,623,376,985,738,674,363,972,725,661,414,959,712,648,401, 1023] [ns_server:info,2014-08-19T16:51:29.227,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 598 state to replica [ns_server:info,2014-08-19T16:51:29.234,ns_1@10.242.238.90:<0.28544.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 598 to state replica [ns_server:debug,2014-08-19T16:51:29.262,ns_1@10.242.238.90:<0.28544.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_598_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:29.264,ns_1@10.242.238.90:<0.28544.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[598]}, {checkpoints,[{598,0}]}, 
{name,<<"replication_building_598_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[598]}, {takeover,false}, {suffix,"building_598_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",598,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:29.264,ns_1@10.242.238.90:<0.28544.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28545.0> [rebalance:debug,2014-08-19T16:51:29.265,ns_1@10.242.238.90:<0.28544.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:29.265,ns_1@10.242.238.90:<0.28544.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.24142.1>,#Ref<16550.0.2.21743>}]} [rebalance:info,2014-08-19T16:51:29.265,ns_1@10.242.238.90:<0.28544.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 598 [rebalance:debug,2014-08-19T16:51:29.266,ns_1@10.242.238.90:<0.28544.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.24142.1>,#Ref<16550.0.2.21743>}] [ns_server:debug,2014-08-19T16:51:29.266,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28546.0> (ok) [ns_server:debug,2014-08-19T16:51:29.267,ns_1@10.242.238.90:<0.28544.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:29.268,ns_1@10.242.238.90:<0.28547.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 598 [ns_server:info,2014-08-19T16:51:29.272,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 342 state to replica [ns_server:info,2014-08-19T16:51:29.276,ns_1@10.242.238.90:<0.28550.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 342 to state replica [views:debug,2014-08-19T16:51:29.276,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/357. 
Updated state: replica (1) [ns_server:debug,2014-08-19T16:51:29.276,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",357,replica,1} [ns_server:debug,2014-08-19T16:51:29.296,ns_1@10.242.238.90:<0.28550.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_342_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:29.298,ns_1@10.242.238.90:<0.28550.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[342]}, {checkpoints,[{342,0}]}, {name,<<"replication_building_342_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[342]}, {takeover,false}, {suffix,"building_342_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",342,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:51:29.299,ns_1@10.242.238.90:<0.28550.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28551.0> [rebalance:debug,2014-08-19T16:51:29.299,ns_1@10.242.238.90:<0.28550.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:29.299,ns_1@10.242.238.90:<0.28550.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.24164.1>,#Ref<16550.0.2.21848>}]} [rebalance:info,2014-08-19T16:51:29.299,ns_1@10.242.238.90:<0.28550.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 342 [rebalance:debug,2014-08-19T16:51:29.300,ns_1@10.242.238.90:<0.28550.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.24164.1>,#Ref<16550.0.2.21848>}] [ns_server:debug,2014-08-19T16:51:29.301,ns_1@10.242.238.90:<0.28550.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:29.313,ns_1@10.242.238.90:<0.28552.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 342 [ns_server:debug,2014-08-19T16:51:29.364,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 355. Nacking mccouch update. [views:debug,2014-08-19T16:51:29.364,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/355. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:29.364,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",355,replica,0} [ns_server:debug,2014-08-19T16:51:29.364,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,609,426,362,971,724,660,413,958,711,647,400, 1022,945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970, 723,659,412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620, 373,982,735,671,607,424,360,969,722,658,411,956,709,645,398,1020,943,760,696, 632,385,1007,994,747,683,619,372,981,734,670,423,359,968,721,657,410,955,708, 644,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422, 358,967,720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745, 681,617,370,979,732,668,421,357,966,719,655,408,953,706,642,395,1017,940,757, 693,629,382,1004,991,744,680,616,369,978,731,667,420,965,718,654,407,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,419, 355,964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742, 678,614,367,976,729,665,418,963,716,652,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,417,962,715,651,404,949,766,702, 638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663,416,961, 714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739,675,611,364, 973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998,751,687,623,376, 985,738,674,363,972,725,661,414,959,712,648,401,1023,946,763,699,635,388, 1010] [views:debug,2014-08-19T16:51:29.414,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/355. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:29.414,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",355,replica,0} [ns_server:debug,2014-08-19T16:51:29.564,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 353. Nacking mccouch update. [views:debug,2014-08-19T16:51:29.565,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/353. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:29.565,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",353,replica,0} [ns_server:debug,2014-08-19T16:51:29.565,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,609,426,362,971,724,660,413,958,711,647,400, 1022,945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970, 723,659,412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620, 373,982,735,671,607,424,360,969,722,658,411,956,709,645,398,1020,943,760,696, 632,385,1007,994,747,683,619,372,981,734,670,423,359,968,721,657,410,955,708, 644,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422, 358,967,720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745, 681,617,370,979,732,668,421,357,966,719,655,408,953,706,642,395,1017,940,757, 693,629,382,1004,991,744,680,616,369,978,731,667,420,965,718,654,407,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,419, 355,964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742, 678,614,367,976,729,665,418,963,716,652,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,417,353,962,715,651,404,949,766, 702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663,416, 961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739,675,611, 364,973,726,662,415,960,713,649,402,947,764,700,636,389,1011,998,751,687,623, 376,985,738,674,363,972,725,661,414,959,712,648,401,1023,946,763,699,635,388, 1010] [views:debug,2014-08-19T16:51:29.632,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/353. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:29.632,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",353,replica,0} [ns_server:debug,2014-08-19T16:51:29.782,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 351. Nacking mccouch update. [views:debug,2014-08-19T16:51:29.782,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/351. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:29.782,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",351,replica,0} [ns_server:debug,2014-08-19T16:51:29.783,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,609,426,362,971,724,660,413,958,711,647,400, 1022,945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970, 723,659,412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620, 373,982,735,671,607,424,360,969,722,658,411,956,709,645,398,1020,943,760,696, 632,385,1007,994,747,683,619,372,981,734,670,423,359,968,721,657,410,955,708, 644,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422, 358,967,720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745, 681,617,370,979,732,668,421,357,966,719,655,408,953,706,642,395,1017,940,757, 693,629,382,1004,991,744,680,616,369,978,731,667,420,965,718,654,407,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,419, 355,964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742, 678,614,367,976,729,665,418,963,716,652,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,417,353,962,715,651,404,949,766, 702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663,416, 961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739,675,611, 364,973,726,662,415,351,960,713,649,402,947,764,700,636,389,1011,998,751,687, 623,376,985,738,674,363,972,725,661,414,959,712,648,401,1023,946,763,699,635, 388,1010] [views:debug,2014-08-19T16:51:29.833,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/351. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:29.833,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",351,replica,0} [ns_server:debug,2014-08-19T16:51:29.901,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 610. Nacking mccouch update. [views:debug,2014-08-19T16:51:29.901,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/610. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:29.901,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",610,pending,0} [ns_server:debug,2014-08-19T16:51:29.902,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,609,426,362,971,724,660,413,958,711,647,400, 1022,945,762,698,634,387,1009,996,749,685,621,374,983,736,672,425,361,970, 723,659,412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620, 373,982,735,671,607,424,360,969,722,658,411,956,709,645,398,1020,943,760,696, 632,385,1007,994,747,683,619,372,981,734,670,423,359,968,721,657,410,955,708, 644,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422, 358,967,720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745, 681,617,370,979,732,668,421,357,966,719,655,408,953,706,642,395,1017,940,757, 693,629,382,1004,991,744,680,616,369,978,731,667,420,965,718,654,407,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,419, 355,964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742, 678,614,367,976,729,665,418,963,716,652,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,417,353,962,715,651,404,949,766, 702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663,416, 961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739,675,611, 364,973,726,662,415,351,960,713,649,402,947,764,700,636,389,1011,998,751,687, 623,376,985,738,674,610,363,972,725,661,414,959,712,648,401,1023,946,763,699, 635,388,1010] [views:debug,2014-08-19T16:51:29.935,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/610. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:29.935,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",610,pending,0} [ns_server:debug,2014-08-19T16:51:30.043,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 608. Nacking mccouch update. [views:debug,2014-08-19T16:51:30.043,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/608. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:30.043,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",608,pending,0} [ns_server:debug,2014-08-19T16:51:30.044,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,375,984,737,673,609,426,362,971,724,660,413,958,711,647,400, 1022,945,762,698,634,387,1009,996,749,685,621,374,983,736,672,608,425,361, 970,723,659,412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684, 620,373,982,735,671,607,424,360,969,722,658,411,956,709,645,398,1020,943,760, 696,632,385,1007,994,747,683,619,372,981,734,670,423,359,968,721,657,410,955, 708,644,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669, 422,358,967,720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992, 745,681,617,370,979,732,668,421,357,966,719,655,408,953,706,642,395,1017,940, 757,693,629,382,1004,991,744,680,616,369,978,731,667,420,965,718,654,407,952, 705,641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666, 419,355,964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989, 742,678,614,367,976,729,665,418,963,716,652,405,950,767,703,639,392,1014,754, 690,626,379,1001,988,741,677,613,366,975,728,664,417,353,962,715,651,404,949, 766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663, 416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739,675, 611,364,973,726,662,415,351,960,713,649,402,947,764,700,636,389,1011,998,751, 687,623,376,985,738,674,610,363,972,725,661,414,959,712,648,401,1023,946,763, 699,635,388,1010] [views:debug,2014-08-19T16:51:30.102,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/608. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:30.102,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",608,pending,0} [ns_server:debug,2014-08-19T16:51:30.178,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 606. Nacking mccouch update. [views:debug,2014-08-19T16:51:30.178,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/606. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:30.178,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",606,pending,0} [ns_server:debug,2014-08-19T16:51:30.179,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,737,673,609,426,362,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,608,425,361,970,723,659,412, 957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735, 671,607,424,360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657,410,955,708,644, 397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358, 967,720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681, 617,370,979,732,668,421,357,966,719,655,408,953,706,642,395,1017,940,757,693, 629,382,1004,991,744,680,616,369,978,731,667,420,965,718,654,407,952,705,641, 394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,419,355, 964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678, 614,367,976,729,665,418,963,716,652,405,950,767,703,639,392,1014,754,690,626, 379,1001,988,741,677,613,366,975,728,664,417,353,962,715,651,404,949,766,702, 638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663,416,961, 714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739,675,611,364, 973,726,662,415,351,960,713,649,402,947,764,700,636,389,1011,998,751,687,623, 376,985,738,674,610,363,972,725,661,414,959,712,648,401,1023,946,763,699,635, 388,1010,997,686,375] [views:debug,2014-08-19T16:51:30.213,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/606. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:30.213,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",606,pending,0} [ns_server:debug,2014-08-19T16:51:30.362,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 604. Nacking mccouch update. [views:debug,2014-08-19T16:51:30.362,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/604. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:30.362,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",604,pending,0} [ns_server:debug,2014-08-19T16:51:30.363,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,737,673,609,426,362,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,608,425,361,970,723,659,412, 957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735, 671,607,424,360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657,410,955,708,644, 397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358, 967,720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681, 617,370,979,732,668,604,421,357,966,719,655,408,953,706,642,395,1017,940,757, 693,629,382,1004,991,744,680,616,369,978,731,667,420,965,718,654,407,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,419, 355,964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742, 678,614,367,976,729,665,418,963,716,652,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,417,353,962,715,651,404,949,766, 702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663,416, 961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739,675,611, 364,973,726,662,415,351,960,713,649,402,947,764,700,636,389,1011,998,751,687, 623,376,985,738,674,610,363,972,725,661,414,959,712,648,401,1023,946,763,699, 635,388,1010,997,686,375] [views:debug,2014-08-19T16:51:30.413,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/604. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:30.413,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",604,pending,0} [ns_server:debug,2014-08-19T16:51:30.555,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 602. Nacking mccouch update. [views:debug,2014-08-19T16:51:30.555,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/602. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:30.555,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",602,pending,0} [ns_server:debug,2014-08-19T16:51:30.555,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,737,673,609,426,362,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,608,425,361,970,723,659,412, 957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735, 671,607,424,360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657,410,955,708,644, 397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358, 967,720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681, 617,370,979,732,668,604,421,357,966,719,655,408,953,706,642,395,1017,940,757, 693,629,382,1004,991,744,680,616,369,978,731,667,420,965,718,654,407,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989, 742,678,614,367,976,729,665,418,963,716,652,405,950,767,703,639,392,1014,754, 690,626,379,1001,988,741,677,613,366,975,728,664,417,353,962,715,651,404,949, 766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663, 416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739,675, 611,364,973,726,662,415,351,960,713,649,402,947,764,700,636,389,1011,998,751, 687,623,376,985,738,674,610,363,972,725,661,414,959,712,648,401,1023,946,763, 699,635,388,1010,997,686,375] [views:debug,2014-08-19T16:51:30.605,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/602. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:30.605,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",602,pending,0} [ns_server:debug,2014-08-19T16:51:30.755,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 600. Nacking mccouch update. [views:debug,2014-08-19T16:51:30.756,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/600. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:30.756,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",600,pending,0} [ns_server:debug,2014-08-19T16:51:30.756,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,737,673,609,426,362,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,608,425,361,970,723,659,412, 957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735, 671,607,424,360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657,410,955,708,644, 397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358, 967,720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681, 617,370,979,732,668,604,421,357,966,719,655,408,953,706,642,395,1017,940,757, 693,629,382,1004,991,744,680,616,369,978,731,667,420,965,718,654,407,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989, 742,678,614,367,976,729,665,418,963,716,652,405,950,767,703,639,392,1014,754, 690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,404, 949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727, 663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739, 675,611,364,973,726,662,415,351,960,713,649,402,947,764,700,636,389,1011,998, 751,687,623,376,985,738,674,610,363,972,725,661,414,959,712,648,401,1023,946, 763,699,635,388,1010,997,686,375] [views:debug,2014-08-19T16:51:30.819,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/600. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:30.819,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",600,pending,0} [ns_server:debug,2014-08-19T16:51:30.911,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 598. Nacking mccouch update. [views:debug,2014-08-19T16:51:30.911,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/598. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:30.911,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",598,pending,0} [ns_server:debug,2014-08-19T16:51:30.912,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,737,673,609,426,362,971,724,660,413,958,711,647,400,1022,945,762, 698,634,387,1009,996,749,685,621,374,983,736,672,608,425,361,970,723,659,412, 957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735, 671,607,424,360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657,410,955,708,644, 397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358, 967,720,656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681, 617,370,979,732,668,604,421,357,966,719,655,408,953,706,642,395,1017,940,757, 693,629,382,1004,991,744,680,616,369,978,731,667,420,965,718,654,407,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989, 742,678,614,367,976,729,665,418,963,716,652,405,950,767,703,639,392,1014,754, 690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,404, 949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727, 663,416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739, 675,611,364,973,726,662,598,415,351,960,713,649,402,947,764,700,636,389,1011, 998,751,687,623,376,985,738,674,610,363,972,725,661,414,959,712,648,401,1023, 946,763,699,635,388,1010,997,686,375] [views:debug,2014-08-19T16:51:30.961,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/598. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:30.962,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",598,pending,0} [ns_server:debug,2014-08-19T16:51:31.045,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 356. Nacking mccouch update. [views:debug,2014-08-19T16:51:31.045,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/356. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:31.045,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",356,replica,0} [ns_server:debug,2014-08-19T16:51:31.046,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,971,724,660,413,958,711,647,400,1022,945,762,698,634,387, 1009,996,749,685,621,374,983,736,672,608,425,361,970,723,659,412,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607,424, 360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,606,423,359,968,721,657,410,955,708,644,397,1019,942, 759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358,967,720,656,409, 954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732, 668,604,421,357,966,719,655,408,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,731,667,420,356,965,718,654,407,952,705,641,394, 1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355, 964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678, 614,367,976,729,665,418,963,716,652,405,950,767,703,639,392,1014,754,690,626, 379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,404,949,766, 702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663,416, 961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739,675,611, 364,973,726,662,598,415,351,960,713,649,402,947,764,700,636,389,1011,998,751, 687,623,376,985,738,674,610,363,972,725,661,414,959,712,648,401,1023,946,763, 699,635,388,1010,997,686,375,737,609,426] [views:debug,2014-08-19T16:51:31.104,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/356. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:31.105,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",356,replica,0} [ns_server:debug,2014-08-19T16:51:31.254,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 354. Nacking mccouch update. [views:debug,2014-08-19T16:51:31.254,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/354. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:31.254,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",354,replica,0} [ns_server:debug,2014-08-19T16:51:31.255,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,971,724,660,413,958,711,647,400,1022,945,762,698,634,387, 1009,996,749,685,621,374,983,736,672,608,425,361,970,723,659,412,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607,424, 360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,606,423,359,968,721,657,410,955,708,644,397,1019,942, 759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358,967,720,656,409, 954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732, 668,604,421,357,966,719,655,408,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,731,667,420,356,965,718,654,407,952,705,641,394, 1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355, 964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678, 614,367,976,729,665,418,354,963,716,652,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,404,949, 766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663, 416,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739,675, 611,364,973,726,662,598,415,351,960,713,649,402,947,764,700,636,389,1011,998, 751,687,623,376,985,738,674,610,363,972,725,661,414,959,712,648,401,1023,946, 763,699,635,388,1010,997,686,375,737,609,426] [views:debug,2014-08-19T16:51:31.338,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/354. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:31.338,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",354,replica,0} [ns_server:debug,2014-08-19T16:51:31.497,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 352. Nacking mccouch update. [views:debug,2014-08-19T16:51:31.497,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/352. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:31.497,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",352,replica,0} [ns_server:debug,2014-08-19T16:51:31.498,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,971,724,660,413,958,711,647,400,1022,945,762,698,634,387, 1009,996,749,685,621,374,983,736,672,608,425,361,970,723,659,412,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607,424, 360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,606,423,359,968,721,657,410,955,708,644,397,1019,942, 759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358,967,720,656,409, 954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732, 668,604,421,357,966,719,655,408,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,731,667,420,356,965,718,654,407,952,705,641,394, 1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355, 964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678, 614,367,976,729,665,418,354,963,716,652,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,404,949, 766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663, 416,352,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739, 675,611,364,973,726,662,598,415,351,960,713,649,402,947,764,700,636,389,1011, 998,751,687,623,376,985,738,674,610,363,972,725,661,414,959,712,648,401,1023, 946,763,699,635,388,1010,997,686,375,737,609,426] [views:debug,2014-08-19T16:51:31.556,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/352. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:31.556,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",352,replica,0} [ns_server:debug,2014-08-19T16:51:31.673,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 350. Nacking mccouch update. [views:debug,2014-08-19T16:51:31.673,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/350. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:31.673,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",350,replica,0} [ns_server:debug,2014-08-19T16:51:31.674,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,971,724,660,413,958,711,647,400,1022,945,762,698,634,387, 1009,996,749,685,621,374,983,736,672,608,425,361,970,723,659,412,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607,424, 360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,606,423,359,968,721,657,410,955,708,644,397,1019,942, 759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358,967,720,656,409, 954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732, 668,604,421,357,966,719,655,408,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,731,667,420,356,965,718,654,407,952,705,641,394, 1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355, 964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678, 614,367,976,729,665,418,354,963,716,652,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,404,949, 766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663, 416,352,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739, 675,611,364,973,726,662,598,415,351,960,713,649,402,947,764,700,636,389,1011, 998,751,687,623,376,985,738,674,610,363,972,725,661,414,350,959,712,648,401, 1023,946,763,699,635,388,1010,997,686,375,737,609,426] [views:debug,2014-08-19T16:51:31.740,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/350. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:31.740,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",350,replica,0} [ns_server:debug,2014-08-19T16:51:31.819,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:31.822,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1570 us [ns_server:debug,2014-08-19T16:51:31.823,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:31.823,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:31.824,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{852, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:31.898,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 348. Nacking mccouch update. [views:debug,2014-08-19T16:51:31.898,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/348. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:31.899,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",348,replica,0} [ns_server:debug,2014-08-19T16:51:31.900,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,971,724,660,413,958,711,647,400,1022,945,762,698,634,387, 1009,996,749,685,621,374,983,736,672,608,425,361,970,723,659,412,348,957,710, 646,399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607, 424,360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994, 747,683,619,372,981,734,670,606,423,359,968,721,657,410,955,708,644,397,1019, 942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358,967,720,656, 409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979, 732,668,604,421,357,966,719,655,408,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,731,667,420,356,965,718,654,407,952,705,641,394, 1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355, 964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678, 614,367,976,729,665,418,354,963,716,652,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,404,949, 766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663, 416,352,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739, 675,611,364,973,726,662,598,415,351,960,713,649,402,947,764,700,636,389,1011, 998,751,687,623,376,985,738,674,610,363,972,725,661,414,350,959,712,648,401, 1023,946,763,699,635,388,1010,997,686,375,737,609,426] [views:debug,2014-08-19T16:51:31.932,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/348. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:31.933,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",348,replica,0} [ns_server:debug,2014-08-19T16:51:31.999,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 346. Nacking mccouch update. [views:debug,2014-08-19T16:51:31.999,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/346. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:31.999,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",346,replica,0} [ns_server:debug,2014-08-19T16:51:32.000,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,413,958,711,647,400,1022,945,762,698,634,387,1009, 996,749,685,621,374,983,736,672,608,425,361,970,723,659,412,348,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607,424, 360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,606,423,359,968,721,657,410,346,955,708,644,397,1019, 942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358,967,720,656, 409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979, 732,668,604,421,357,966,719,655,408,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,731,667,420,356,965,718,654,407,952,705,641,394, 1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355, 964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678, 614,367,976,729,665,418,354,963,716,652,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,404,949, 766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663, 416,352,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739, 675,611,364,973,726,662,598,415,351,960,713,649,402,947,764,700,636,389,1011, 998,751,687,623,376,985,738,674,610,363,972,725,661,414,350,959,712,648,401, 1023,946,763,699,635,388,1010,997,686,375,737,609,426,971,660] [views:debug,2014-08-19T16:51:32.033,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/346. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:32.033,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",346,replica,0} [ns_server:debug,2014-08-19T16:51:32.100,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 344. Nacking mccouch update. [views:debug,2014-08-19T16:51:32.100,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/344. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:32.100,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",344,replica,0} [ns_server:debug,2014-08-19T16:51:32.101,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,413,958,711,647,400,1022,945,762,698,634,387,1009, 996,749,685,621,374,983,736,672,608,425,361,970,723,659,412,348,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607,424, 360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,606,423,359,968,721,657,410,346,955,708,644,397,1019, 942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358,967,720,656, 409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979, 732,668,604,421,357,966,719,655,408,344,953,706,642,395,1017,940,757,693,629, 382,1004,991,744,680,616,369,978,731,667,420,356,965,718,654,407,952,705,641, 394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419, 355,964,717,653,406,951,704,640,393,1015,938,755,691,627,380,1002,989,742, 678,614,367,976,729,665,418,354,963,716,652,405,950,767,703,639,392,1014,754, 690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,404, 949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727, 663,416,352,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986, 739,675,611,364,973,726,662,598,415,351,960,713,649,402,947,764,700,636,389, 1011,998,751,687,623,376,985,738,674,610,363,972,725,661,414,350,959,712,648, 401,1023,946,763,699,635,388,1010,997,686,375,737,609,426,971,660] [views:debug,2014-08-19T16:51:32.134,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/344. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:32.134,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",344,replica,0} [ns_server:debug,2014-08-19T16:51:32.214,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 342. Nacking mccouch update. [views:debug,2014-08-19T16:51:32.214,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/342. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:32.214,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",342,replica,0} [ns_server:debug,2014-08-19T16:51:32.215,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,413,958,711,647,400,1022,945,762,698,634,387,1009, 996,749,685,621,374,983,736,672,608,425,361,970,723,659,412,348,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607,424, 360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,606,423,359,968,721,657,410,346,955,708,644,397,1019, 942,759,695,631,384,1006,993,746,682,618,371,980,733,669,422,358,967,720,656, 409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979, 732,668,604,421,357,966,719,655,408,344,953,706,642,395,1017,940,757,693,629, 382,1004,991,744,680,616,369,978,731,667,420,356,965,718,654,407,952,705,641, 394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419, 355,964,717,653,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989, 742,678,614,367,976,729,665,418,354,963,716,652,405,950,767,703,639,392,1014, 754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974, 727,663,416,352,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377, 986,739,675,611,364,973,726,662,598,415,351,960,713,649,402,947,764,700,636, 389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661,414,350,959,712, 648,401,1023,946,763,699,635,388,1010,997,686,375,737,609,426,971,660] [views:debug,2014-08-19T16:51:32.289,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/342. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:32.290,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",342,replica,0} [ns_server:debug,2014-08-19T16:51:32.423,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 605. Nacking mccouch update. [views:debug,2014-08-19T16:51:32.423,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/605. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:32.424,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",605,pending,0} [ns_server:debug,2014-08-19T16:51:32.424,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,413,958,711,647,400,1022,945,762,698,634,387,1009, 996,749,685,621,374,983,736,672,608,425,361,970,723,659,412,348,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607,424, 360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,606,423,359,968,721,657,410,346,955,708,644,397,1019, 942,759,695,631,384,1006,993,746,682,618,371,980,733,669,605,422,358,967,720, 656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370, 979,732,668,604,421,357,966,719,655,408,344,953,706,642,395,1017,940,757,693, 629,382,1004,991,744,680,616,369,978,731,667,420,356,965,718,654,407,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,406,342,951,704,640,393,1015,938,755,691,627,380,1002, 989,742,678,614,367,976,729,665,418,354,963,716,652,405,950,767,703,639,392, 1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962, 715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612, 365,974,727,663,416,352,961,714,650,403,948,765,701,637,390,1012,999,752,688, 624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,402,947,764, 700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661,414,350, 959,712,648,401,1023,946,763,699,635,388,1010,997,686,375,737,609,426,971, 660] [views:debug,2014-08-19T16:51:32.474,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/605. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:32.474,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",605,pending,0} [ns_server:debug,2014-08-19T16:51:32.557,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 603. Nacking mccouch update. [views:debug,2014-08-19T16:51:32.558,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/603. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:32.558,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",603,pending,0} [ns_server:debug,2014-08-19T16:51:32.558,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,413,958,711,647,400,1022,945,762,698,634,387,1009, 996,749,685,621,374,983,736,672,608,425,361,970,723,659,412,348,957,710,646, 399,1021,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607,424, 360,969,722,658,411,956,709,645,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,606,423,359,968,721,657,410,346,955,708,644,397,1019, 942,759,695,631,384,1006,993,746,682,618,371,980,733,669,605,422,358,967,720, 656,409,954,707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370, 979,732,668,604,421,357,966,719,655,408,344,953,706,642,395,1017,940,757,693, 629,382,1004,991,744,680,616,369,978,731,667,603,420,356,965,718,654,407,952, 705,641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666, 602,419,355,964,717,653,406,342,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,418,354,963,716,652,405,950,767,703,639, 392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353, 962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676, 612,365,974,727,663,416,352,961,714,650,403,948,765,701,637,390,1012,999,752, 688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,402,947, 764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661,414, 350,959,712,648,401,1023,946,763,699,635,388,1010,997,686,375,737,609,426, 971,660] [views:debug,2014-08-19T16:51:32.608,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/603. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:32.608,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",603,pending,0} [ns_server:debug,2014-08-19T16:51:32.692,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 601. Nacking mccouch update. [views:debug,2014-08-19T16:51:32.692,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/601. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:32.692,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",601,pending,0} [ns_server:debug,2014-08-19T16:51:32.693,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,413,958,647,945,762,698,634,387,1009,996,749,685,621, 374,983,736,672,608,425,361,970,723,659,412,348,957,710,646,399,1021,944,761, 697,633,386,1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,411, 956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981,734, 670,606,423,359,968,721,657,410,346,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,409,954,707,643, 396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421, 357,966,719,655,408,344,953,706,642,395,1017,940,757,693,629,382,1004,991, 744,680,616,369,978,731,667,603,420,356,965,718,654,407,952,705,641,394,1016, 939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717, 653,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,614, 367,976,729,665,601,418,354,963,716,652,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,404,949, 766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663, 416,352,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986,739, 675,611,364,973,726,662,598,415,351,960,713,649,402,947,764,700,636,389,1011, 998,751,687,623,376,985,738,674,610,363,972,725,661,414,350,959,712,648,401, 1023,946,763,699,635,388,1010,997,686,375,737,609,426,971,660,711,400,1022] [views:debug,2014-08-19T16:51:32.742,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/601. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:32.742,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",601,pending,0} [ns_server:debug,2014-08-19T16:51:32.894,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 599. Nacking mccouch update. [views:debug,2014-08-19T16:51:32.894,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/599. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:32.894,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",599,pending,0} [ns_server:debug,2014-08-19T16:51:32.895,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,413,958,647,945,762,698,634,387,1009,996,749,685,621, 374,983,736,672,608,425,361,970,723,659,412,348,957,710,646,399,1021,944,761, 697,633,386,1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,411, 956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981,734, 670,606,423,359,968,721,657,410,346,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,409,954,707,643, 396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421, 357,966,719,655,408,344,953,706,642,395,1017,940,757,693,629,382,1004,991, 744,680,616,369,978,731,667,603,420,356,965,718,654,407,952,705,641,394,1016, 939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717, 653,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,614, 367,976,729,665,601,418,354,963,716,652,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,404,949, 766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663, 599,416,352,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986, 739,675,611,364,973,726,662,598,415,351,960,713,649,402,947,764,700,636,389, 1011,998,751,687,623,376,985,738,674,610,363,972,725,661,414,350,959,712,648, 401,1023,946,763,699,635,388,1010,997,686,375,737,609,426,971,660,711,400, 1022] [views:debug,2014-08-19T16:51:32.945,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/599. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:32.945,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",599,pending,0} [ns_server:debug,2014-08-19T16:51:33.018,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 349. Nacking mccouch update. [views:debug,2014-08-19T16:51:33.018,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/349. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:33.018,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",349,replica,0} [ns_server:debug,2014-08-19T16:51:33.019,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,413,958,647,945,762,698,634,387,1009,996,749,685,621, 374,983,736,672,608,425,361,970,723,659,412,348,957,710,646,399,1021,944,761, 697,633,386,1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,411, 956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981,734, 670,606,423,359,968,721,657,410,346,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,409,954,707,643, 396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421, 357,966,719,655,408,344,953,706,642,395,1017,940,757,693,629,382,1004,991, 744,680,616,369,978,731,667,603,420,356,965,718,654,407,952,705,641,394,1016, 939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717, 653,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,614, 367,976,729,665,601,418,354,963,716,652,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,404,949, 766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727,663, 599,416,352,961,714,650,403,948,765,701,637,390,1012,999,752,688,624,377,986, 739,675,611,364,973,726,662,598,415,351,960,713,649,402,947,764,700,636,389, 1011,998,751,687,623,376,985,738,674,610,363,972,725,661,414,350,959,712,648, 401,1023,946,763,699,635,388,1010,997,686,375,737,609,426,971,660,349,711, 400,1022] [views:debug,2014-08-19T16:51:33.052,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/349. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:33.052,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",349,replica,0} [ns_server:debug,2014-08-19T16:51:33.135,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 347. Nacking mccouch update. [views:debug,2014-08-19T16:51:33.135,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/347. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:33.136,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",347,replica,0} [ns_server:debug,2014-08-19T16:51:33.136,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,413,958,647,945,762,698,634,387,1009,996,749,685,621, 374,983,736,672,608,425,361,970,723,659,412,348,957,710,646,399,1021,944,761, 697,633,386,1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,411, 347,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981, 734,670,606,423,359,968,721,657,410,346,955,708,644,397,1019,942,759,695,631, 384,1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,409,954,707, 643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604, 421,357,966,719,655,408,344,953,706,642,395,1017,940,757,693,629,382,1004, 991,744,680,616,369,978,731,667,603,420,356,965,718,654,407,952,705,641,394, 1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355, 964,717,653,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989,742, 678,614,367,976,729,665,601,418,354,963,716,652,405,950,767,703,639,392,1014, 754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974, 727,663,599,416,352,961,714,650,403,948,765,701,637,390,1012,999,752,688,624, 377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,402,947,764,700, 636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661,414,350,959, 712,648,401,1023,946,763,699,635,388,1010,997,686,375,737,609,426,971,660, 349,711,400,1022] [ns_server:info,2014-08-19T16:51:33.166,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.89' [views:debug,2014-08-19T16:51:33.170,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/347. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:33.170,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",347,replica,0} [ns_server:debug,2014-08-19T16:51:33.254,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 345. Nacking mccouch update. [views:debug,2014-08-19T16:51:33.254,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/345. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:33.254,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",345,replica,0} [ns_server:debug,2014-08-19T16:51:33.255,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,413,958,647,945,762,698,634,387,1009,996,749,685,621, 374,983,736,672,608,425,361,970,723,659,412,348,957,710,646,399,1021,944,761, 697,633,386,1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,411, 347,956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981, 734,670,606,423,359,968,721,657,410,346,955,708,644,397,1019,942,759,695,631, 384,1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,409,345,954, 707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668, 604,421,357,966,719,655,408,344,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,731,667,603,420,356,965,718,654,407,952,705,641, 394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419, 355,964,717,653,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989, 742,678,614,367,976,729,665,601,418,354,963,716,652,405,950,767,703,639,392, 1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962, 715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612, 365,974,727,663,599,416,352,961,714,650,403,948,765,701,637,390,1012,999,752, 688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,402,947, 764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661,414, 350,959,712,648,401,1023,946,763,699,635,388,1010,997,686,375,737,609,426, 971,660,349,711,400,1022] [views:debug,2014-08-19T16:51:33.306,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/345. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:33.306,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",345,replica,0} [ns_server:debug,2014-08-19T16:51:33.455,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 343. Nacking mccouch update. [views:debug,2014-08-19T16:51:33.455,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/343. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:33.455,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",343,replica,0} [ns_server:debug,2014-08-19T16:51:33.456,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,413,958,647,698,387,1009,996,749,685,621,374,983,736, 672,608,425,361,970,723,659,412,348,957,710,646,399,1021,944,761,697,633,386, 1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,411,347,956,709, 645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981,734,670,606, 423,359,968,721,657,410,346,955,708,644,397,1019,942,759,695,631,384,1006, 993,746,682,618,371,980,733,669,605,422,358,967,720,656,409,345,954,707,643, 396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421, 357,966,719,655,408,344,953,706,642,395,1017,940,757,693,629,382,1004,991, 744,680,616,369,978,731,667,603,420,356,965,718,654,407,343,952,705,641,394, 1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355, 964,717,653,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989,742, 678,614,367,976,729,665,601,418,354,963,716,652,405,950,767,703,639,392,1014, 754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974, 727,663,599,416,352,961,714,650,403,948,765,701,637,390,1012,999,752,688,624, 377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,402,947,764,700, 636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661,414,350,959, 712,648,401,1023,946,763,699,635,388,1010,997,686,375,737,609,426,971,660, 349,711,400,1022,945,762,634] [views:debug,2014-08-19T16:51:33.506,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/343. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:33.506,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",343,replica,0} [rebalance:debug,2014-08-19T16:51:33.506,ns_1@10.242.238.90:<0.28140.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:33.506,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28140.0> (ok) [rebalance:debug,2014-08-19T16:51:33.506,ns_1@10.242.238.90:<0.28165.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:33.506,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28165.0> (ok) [rebalance:debug,2014-08-19T16:51:33.615,ns_1@10.242.238.90:<0.28104.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:33.616,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28104.0> (ok) [rebalance:debug,2014-08-19T16:51:33.616,ns_1@10.242.238.90:<0.28115.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:33.616,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28115.0> (ok) [rebalance:debug,2014-08-19T16:51:33.704,ns_1@10.242.238.90:<0.28171.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:33.704,ns_1@10.242.238.90:<0.28196.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:33.704,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28171.0> (ok) [ns_server:debug,2014-08-19T16:51:33.704,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28196.0> (ok) [rebalance:debug,2014-08-19T16:51:33.805,ns_1@10.242.238.90:<0.28135.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:33.805,ns_1@10.242.238.90:<0.28146.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:33.805,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28135.0> (ok) [ns_server:debug,2014-08-19T16:51:33.805,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28146.0> (ok) [rebalance:debug,2014-08-19T16:51:33.888,ns_1@10.242.238.90:<0.28085.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:33.889,ns_1@10.242.238.90:<0.28110.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:33.889,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28085.0> (ok) [ns_server:debug,2014-08-19T16:51:33.889,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28110.0> (ok) [rebalance:debug,2014-08-19T16:51:33.973,ns_1@10.242.238.90:<0.28552.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:33.973,ns_1@10.242.238.90:<0.28541.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:33.973,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28552.0> (ok) [ns_server:debug,2014-08-19T16:51:33.973,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from 
subprocess: <0.28541.0> (ok) [rebalance:debug,2014-08-19T16:51:34.065,ns_1@10.242.238.90:<0.28491.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:34.065,ns_1@10.242.238.90:<0.28516.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:34.065,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28491.0> (ok) [ns_server:debug,2014-08-19T16:51:34.065,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28516.0> (ok) [rebalance:debug,2014-08-19T16:51:34.174,ns_1@10.242.238.90:<0.28466.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:34.174,ns_1@10.242.238.90:<0.28441.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:34.174,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28466.0> (ok) [ns_server:debug,2014-08-19T16:51:34.174,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28441.0> (ok) [rebalance:debug,2014-08-19T16:51:34.177,ns_1@10.242.238.90:<0.28954.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 347 [rebalance:debug,2014-08-19T16:51:34.315,ns_1@10.242.238.90:<0.28416.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:34.315,ns_1@10.242.238.90:<0.28389.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:34.315,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28416.0> (ok) [ns_server:debug,2014-08-19T16:51:34.315,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28389.0> (ok) [rebalance:debug,2014-08-19T16:51:34.390,ns_1@10.242.238.90:<0.28356.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:34.390,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28356.0> (ok) [rebalance:debug,2014-08-19T16:51:34.390,ns_1@10.242.238.90:<0.28345.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:34.390,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28345.0> (ok) [rebalance:debug,2014-08-19T16:51:34.491,ns_1@10.242.238.90:<0.28320.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:34.491,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28320.0> (ok) [rebalance:debug,2014-08-19T16:51:34.491,ns_1@10.242.238.90:<0.28295.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:34.491,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28295.0> (ok) [rebalance:debug,2014-08-19T16:51:34.608,ns_1@10.242.238.90:<0.28269.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:34.608,ns_1@10.242.238.90:<0.28258.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:34.608,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28269.0> (ok) [ns_server:debug,2014-08-19T16:51:34.608,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: 
<0.28258.0> (ok) [rebalance:debug,2014-08-19T16:51:34.708,ns_1@10.242.238.90:<0.28218.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:34.708,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28218.0> (ok) [rebalance:debug,2014-08-19T16:51:34.708,ns_1@10.242.238.90:<0.28522.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:34.708,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28522.0> (ok) [rebalance:debug,2014-08-19T16:51:34.825,ns_1@10.242.238.90:<0.28176.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:34.825,ns_1@10.242.238.90:<0.28472.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:34.825,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28176.0> (ok) [ns_server:debug,2014-08-19T16:51:34.825,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28472.0> (ok) [rebalance:debug,2014-08-19T16:51:34.942,ns_1@10.242.238.90:<0.28547.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:34.942,ns_1@10.242.238.90:<0.28422.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:34.942,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28547.0> (ok) [ns_server:debug,2014-08-19T16:51:34.943,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28422.0> (ok) [rebalance:debug,2014-08-19T16:51:35.101,ns_1@10.242.238.90:<0.28497.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:35.101,ns_1@10.242.238.90:<0.28376.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:35.101,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28497.0> (ok) [ns_server:debug,2014-08-19T16:51:35.102,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28376.0> (ok) [rebalance:debug,2014-08-19T16:51:35.216,ns_1@10.242.238.90:<0.28447.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:35.216,ns_1@10.242.238.90:<0.28326.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:35.216,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28447.0> (ok) [ns_server:debug,2014-08-19T16:51:35.216,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28326.0> (ok) [rebalance:debug,2014-08-19T16:51:35.333,ns_1@10.242.238.90:<0.28290.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:35.333,ns_1@10.242.238.90:<0.28411.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:35.333,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28290.0> (ok) [ns_server:debug,2014-08-19T16:51:35.333,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28411.0> (ok) [rebalance:debug,2014-08-19T16:51:35.458,ns_1@10.242.238.90:<0.28351.0>:janitor_agent:handle_call:795]Done 
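The capi_set_view_manager entries above dump the complete "Usable vbuckets" set every time it changes, so consecutive dumps can be diffed to see which vbuckets just became usable; for instance, vbucket 343 appears in the second dump but not the first, matching the "Added _local/vbuuid document into vb: 343" entry between them. A minimal sketch of that diff, assuming the flattened log layout shown here (the pattern and function names are illustrative, not part of ns_server):

import re

# Illustrative pattern for the capi_set_view_manager "Usable vbuckets:" dumps above,
# assuming the flattened "<header>]Usable vbuckets: [...]" layout of this log.
USABLE_RE = re.compile(
    r"capi_set_view_manager:handle_info:\d+\]Usable vbuckets:\s*\[([^\]]*)\]")

def usable_sets(log_text):
    """Yield each logged 'Usable vbuckets' list as a set of vbucket ids."""
    for match in USABLE_RE.finditer(log_text):
        yield {int(v) for v in match.group(1).split(",") if v.strip()}

def usable_diffs(log_text):
    """Yield (added, removed) between consecutive 'Usable vbuckets' dumps."""
    previous = None
    for current in usable_sets(log_text):
        if previous is not None:
            yield sorted(current - previous), sorted(previous - current)
        previous = current

# Fed the two dumps above, the first diff should come out as ([343], []),
# i.e. vbucket 343 became usable once its _local/vbuuid document was written.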
[rebalance:debug,2014-08-19T16:51:35.458,ns_1@10.242.238.90:<0.28239.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:35.458,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28351.0> (ok) [ns_server:debug,2014-08-19T16:51:35.458,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28239.0> (ok) [rebalance:debug,2014-08-19T16:51:35.592,ns_1@10.242.238.90:<0.28315.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:35.592,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28315.0> (ok) [rebalance:debug,2014-08-19T16:51:35.642,ns_1@10.242.238.90:<0.28264.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:35.642,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28264.0> (ok) [rebalance:debug,2014-08-19T16:51:35.692,ns_1@10.242.238.90:<0.28207.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:35.693,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28207.0> (ok) [rebalance:debug,2014-08-19T16:51:35.760,ns_1@10.242.238.90:<0.28954.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:35.760,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28954.0> (ok) [rebalance:debug,2014-08-19T16:51:37.438,ns_1@10.242.238.90:<0.28970.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 349 [rebalance:debug,2014-08-19T16:51:37.438,ns_1@10.242.238.90:<0.28973.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 348 [rebalance:debug,2014-08-19T16:51:37.439,ns_1@10.242.238.90:<0.28973.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.439,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28973.0> (ok) [rebalance:debug,2014-08-19T16:51:37.440,ns_1@10.242.238.90:<0.28970.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.440,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28970.0> (ok) [rebalance:debug,2014-08-19T16:51:37.549,ns_1@10.242.238.90:<0.28976.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 351 [rebalance:debug,2014-08-19T16:51:37.550,ns_1@10.242.238.90:<0.28978.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 350 [rebalance:debug,2014-08-19T16:51:37.551,ns_1@10.242.238.90:<0.28978.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.551,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28978.0> (ok) [rebalance:debug,2014-08-19T16:51:37.551,ns_1@10.242.238.90:<0.28976.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.551,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28976.0> (ok) [rebalance:debug,2014-08-19T16:51:37.630,ns_1@10.242.238.90:<0.28982.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 353 
[rebalance:debug,2014-08-19T16:51:37.630,ns_1@10.242.238.90:<0.28985.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 352 [rebalance:debug,2014-08-19T16:51:37.631,ns_1@10.242.238.90:<0.28985.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.631,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28985.0> (ok) [rebalance:debug,2014-08-19T16:51:37.632,ns_1@10.242.238.90:<0.28982.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.632,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28982.0> (ok) [rebalance:debug,2014-08-19T16:51:37.697,ns_1@10.242.238.90:<0.28988.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 354 [rebalance:debug,2014-08-19T16:51:37.697,ns_1@10.242.238.90:<0.28991.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 355 [rebalance:debug,2014-08-19T16:51:37.698,ns_1@10.242.238.90:<0.28988.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.698,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28988.0> (ok) [rebalance:debug,2014-08-19T16:51:37.699,ns_1@10.242.238.90:<0.28991.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.699,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28991.0> (ok) [rebalance:debug,2014-08-19T16:51:37.781,ns_1@10.242.238.90:<0.28995.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 356 [rebalance:debug,2014-08-19T16:51:37.781,ns_1@10.242.238.90:<0.28998.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 357 [rebalance:debug,2014-08-19T16:51:37.782,ns_1@10.242.238.90:<0.28995.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.782,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28995.0> (ok) [rebalance:debug,2014-08-19T16:51:37.783,ns_1@10.242.238.90:<0.28998.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.783,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.28998.0> (ok) [rebalance:debug,2014-08-19T16:51:37.848,ns_1@10.242.238.90:<0.29001.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 358 [rebalance:debug,2014-08-19T16:51:37.848,ns_1@10.242.238.90:<0.29004.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 359 [rebalance:debug,2014-08-19T16:51:37.849,ns_1@10.242.238.90:<0.29001.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.849,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29001.0> (ok) [rebalance:debug,2014-08-19T16:51:37.849,ns_1@10.242.238.90:<0.29004.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.850,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29004.0> (ok) [rebalance:debug,2014-08-19T16:51:37.941,ns_1@10.242.238.90:<0.29007.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 360 
[rebalance:debug,2014-08-19T16:51:37.941,ns_1@10.242.238.90:<0.29010.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 361 [rebalance:debug,2014-08-19T16:51:37.942,ns_1@10.242.238.90:<0.29007.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.942,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29007.0> (ok) [rebalance:debug,2014-08-19T16:51:37.942,ns_1@10.242.238.90:<0.29010.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:37.943,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29010.0> (ok) [rebalance:debug,2014-08-19T16:51:38.024,ns_1@10.242.238.90:<0.29013.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 362 [rebalance:debug,2014-08-19T16:51:38.024,ns_1@10.242.238.90:<0.29016.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 603 [rebalance:debug,2014-08-19T16:51:38.025,ns_1@10.242.238.90:<0.29013.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.025,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29013.0> (ok) [rebalance:debug,2014-08-19T16:51:38.025,ns_1@10.242.238.90:<0.29016.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.025,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29016.0> (ok) [rebalance:debug,2014-08-19T16:51:38.145,ns_1@10.242.238.90:<0.29019.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 602 [rebalance:debug,2014-08-19T16:51:38.146,ns_1@10.242.238.90:<0.29022.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 605 [rebalance:debug,2014-08-19T16:51:38.147,ns_1@10.242.238.90:<0.29019.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.147,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29019.0> (ok) [rebalance:debug,2014-08-19T16:51:38.147,ns_1@10.242.238.90:<0.29022.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.147,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29022.0> (ok) [rebalance:debug,2014-08-19T16:51:38.254,ns_1@10.242.238.90:<0.29025.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 604 [rebalance:debug,2014-08-19T16:51:38.255,ns_1@10.242.238.90:<0.29028.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 607 [rebalance:debug,2014-08-19T16:51:38.256,ns_1@10.242.238.90:<0.29025.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.256,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29025.0> (ok) [rebalance:debug,2014-08-19T16:51:38.256,ns_1@10.242.238.90:<0.29028.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.256,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29028.0> (ok) [rebalance:debug,2014-08-19T16:51:38.372,ns_1@10.242.238.90:<0.29039.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 606 
[rebalance:debug,2014-08-19T16:51:38.372,ns_1@10.242.238.90:<0.29042.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 609 [rebalance:debug,2014-08-19T16:51:38.373,ns_1@10.242.238.90:<0.29039.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.373,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29039.0> (ok) [rebalance:debug,2014-08-19T16:51:38.373,ns_1@10.242.238.90:<0.29042.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.373,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29042.0> (ok) [rebalance:debug,2014-08-19T16:51:38.480,ns_1@10.242.238.90:<0.29047.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 608 [rebalance:debug,2014-08-19T16:51:38.480,ns_1@10.242.238.90:<0.29050.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 611 [rebalance:debug,2014-08-19T16:51:38.481,ns_1@10.242.238.90:<0.29047.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.481,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29047.0> (ok) [rebalance:debug,2014-08-19T16:51:38.482,ns_1@10.242.238.90:<0.29050.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.482,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29050.0> (ok) [rebalance:debug,2014-08-19T16:51:38.582,ns_1@10.242.238.90:<0.29053.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 613 [rebalance:debug,2014-08-19T16:51:38.582,ns_1@10.242.238.90:<0.29056.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 610 [rebalance:debug,2014-08-19T16:51:38.583,ns_1@10.242.238.90:<0.29056.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.583,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29056.0> (ok) [rebalance:debug,2014-08-19T16:51:38.583,ns_1@10.242.238.90:<0.29053.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.583,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29053.0> (ok) [rebalance:debug,2014-08-19T16:51:38.690,ns_1@10.242.238.90:<0.29059.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 612 [rebalance:debug,2014-08-19T16:51:38.691,ns_1@10.242.238.90:<0.29062.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 615 [rebalance:debug,2014-08-19T16:51:38.692,ns_1@10.242.238.90:<0.29059.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.692,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29059.0> (ok) [rebalance:debug,2014-08-19T16:51:38.692,ns_1@10.242.238.90:<0.29062.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.692,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29062.0> (ok) [rebalance:debug,2014-08-19T16:51:38.808,ns_1@10.242.238.90:<0.29065.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 614 
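Each "Going to wait for persistence of checkpoint N in vbucket V" entry above is answered later by a "Done" from the same worker pid, so the two can be paired on the pid in the log header to see how long each persistence wait took; most pairs in this stretch complete within a millisecond or two, while the wait for vbucket 347 (pid <0.28954.0>) spans 16:51:34.177 to 16:51:35.760, roughly 1.6 seconds. A rough sketch of that pairing, assuming the timestamp and pid layout shown in these lines (regex and helper names are illustrative, not ns_server code):

import re
from datetime import datetime

# Illustrative pattern for the "[rebalance:debug,<ts>,<node>:<pid>:janitor_agent:..." entries above.
ENTRY_RE = re.compile(
    r"\[rebalance:debug,(?P<ts>[0-9T:.\-]+),\S+?:(?P<pid><[\d.]+>):janitor_agent:handle_call:\d+\]"
    r"(?P<msg>Going to wait for persistence of checkpoint \d+ in vbucket (?P<vb>\d+)|Done)")

def _ts(text):
    return datetime.strptime(text, "%Y-%m-%dT%H:%M:%S.%f")

def persistence_waits(log_text):
    """Pair each persistence wait with the Done from the same pid; return {vbucket: seconds}."""
    pending = {}   # pid -> (start time, vbucket)
    waits = {}
    for entry in ENTRY_RE.finditer(log_text):
        pid = entry.group("pid")
        if entry.group("msg") == "Done":
            if pid in pending:
                started, vbucket = pending.pop(pid)
                waits[vbucket] = (_ts(entry.group("ts")) - started).total_seconds()
        else:
            pending[pid] = (_ts(entry.group("ts")), int(entry.group("vb")))
    return waits

# e.g. persistence_waits(excerpt)[347] comes out near 1.58s, while most other
# vbuckets in this stretch finish their wait in a millisecond or two.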
[rebalance:debug,2014-08-19T16:51:38.808,ns_1@10.242.238.90:<0.29068.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 617 [rebalance:debug,2014-08-19T16:51:38.809,ns_1@10.242.238.90:<0.29065.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.809,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29065.0> (ok) [rebalance:debug,2014-08-19T16:51:38.809,ns_1@10.242.238.90:<0.29068.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.809,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29068.0> (ok) [rebalance:debug,2014-08-19T16:51:38.916,ns_1@10.242.238.90:<0.29071.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 616 [rebalance:debug,2014-08-19T16:51:38.918,ns_1@10.242.238.90:<0.29071.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:38.918,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29071.0> (ok) [rebalance:debug,2014-08-19T16:51:39.060,ns_1@10.242.238.90:<0.29074.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 618 [rebalance:debug,2014-08-19T16:51:39.061,ns_1@10.242.238.90:<0.29074.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:39.061,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29074.0> (ok) [ns_server:debug,2014-08-19T16:51:39.894,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2014-08-19T16:51:39.896,ns_1@10.242.238.90:<0.29084.0>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning up indexes for bucket `default` [ns_server:info,2014-08-19T16:51:39.897,ns_1@10.242.238.90:<0.29084.0>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:51:39.901,ns_1@10.242.238.90:<0.29087.0>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 41400, disk size is 6234600 [ns_server:debug,2014-08-19T16:51:39.901,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:51:39.902,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [rebalance:debug,2014-08-19T16:51:39.933,ns_1@10.242.238.90:<0.29088.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 343 [rebalance:debug,2014-08-19T16:51:39.935,ns_1@10.242.238.90:<0.29088.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:39.935,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29088.0> (ok) [rebalance:debug,2014-08-19T16:51:39.983,ns_1@10.242.238.90:<0.29091.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 345 [rebalance:debug,2014-08-19T16:51:39.984,ns_1@10.242.238.90:<0.29091.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:39.984,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29091.0> (ok) [rebalance:debug,2014-08-19T16:51:40.033,ns_1@10.242.238.90:<0.29094.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 342 [rebalance:debug,2014-08-19T16:51:40.034,ns_1@10.242.238.90:<0.29094.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:40.035,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29094.0> (ok) [rebalance:debug,2014-08-19T16:51:40.067,ns_1@10.242.238.90:<0.28425.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:40.067,ns_1@10.242.238.90:<0.28425.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.068,ns_1@10.242.238.90:<0.29097.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.068,ns_1@10.242.238.90:<0.29097.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.068,ns_1@10.242.238.90:<0.28425.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
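The compaction_daemon entries above report bucket `default` at a data size of 41400 against a disk size of 6234600 (presumably bytes), with a database_fragmentation_threshold of {30,undefined}. Assuming fragmentation is measured as the share of the on-disk file that is not live data, that works out to roughly 99%, well past the 30% mark, and with files this small the whole iteration finishes almost instantly, which is presumably why the daemon logs "Finished compaction too soon" and reschedules the next run 30s out. A small illustrative calculation (the helper is a sketch, not compaction_daemon code):

# A minimal sketch of the fragmentation check implied by the compaction_daemon
# entries above, assuming fragmentation is the fraction of the on-disk file
# that is not live data and that the logged sizes are bytes.
def fragmentation_pct(data_size, disk_size):
    if disk_size <= 0:
        return 0.0
    return (disk_size - data_size) / disk_size * 100.0

data_size, disk_size = 41_400, 6_234_600   # "`default` data size is 41400, disk size is 6234600"
threshold_pct = 30                         # database_fragmentation_threshold {30,undefined}

frag = fragmentation_pct(data_size, disk_size)   # ~99.3%
print(f"fragmentation {frag:.1f}% vs threshold {threshold_pct}%:",
      "over threshold" if frag > threshold_pct else "under threshold")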
[ns_server:info,2014-08-19T16:51:40.071,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 347 state to replica [ns_server:info,2014-08-19T16:51:40.071,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380, 381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426] ([347], []) [ns_server:debug,2014-08-19T16:51:40.072,ns_1@10.242.238.90:<0.29098.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,363,364,365,366,367,368,369,370,371,372, 373,374,375,376,377,378,379,380,381,382,383, 384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.69144>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,363,364,365,366,367,368,369,370,371,372,373,374,375,376, 377,378,379,380,381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.073,ns_1@10.242.238.90:<0.29098.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.28079.0> [ns_server:info,2014-08-19T16:51:40.073,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:40.088,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.089,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:40.090,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.090,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:51:40.090,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.090,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.090,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.090,ns_1@10.242.238.90:<0.29100.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.090,ns_1@10.242.238.90:<0.29100.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.090,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:40.090,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.091,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.091,ns_1@10.242.238.90:<0.28079.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:40.091,ns_1@10.242.238.90:<0.29098.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.28079.0> [ns_server:debug,2014-08-19T16:51:40.091,ns_1@10.242.238.90:<0.29098.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.091,ns_1@10.242.238.90:<0.29102.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.091,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.28079.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.28080.0>,<<"cut off">>,<<"cut off">>,[],196,false,false,0, {1408,452700,90009}, completed, {<0.29098.0>,#Ref<0.0.1.69157>}, <<"replication_ns_1@10.242.238.90">>,<0.28079.0>, {had_backfill,false,undefined,[]}, completed,false}. 
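Each time a vbucket on this node finishes switching to replica, tap_replication_manager logs a "Going to change replication from '<node>' to have [...]" line whose trailing "([...], [...])" pair appears to list the vbuckets added to and removed from the filter, and ns_vbm_new_sup then swaps the filter on the live tap stream instead of rebuilding it: a new child registers under the new id, the old ebucketmigrator silences its upstream sender, confirms the downstream with an opaque message, and hands its socket state to the replacement. A rough way to follow the growing filter from those lines, assuming the format shown above (the pattern and names are illustrative):

import re

# Illustrative pattern for the tap_replication_manager lines above; the trailing
# "([...], [...])" pair is read here as (vbuckets added, vbuckets removed).
CHANGE_RE = re.compile(
    r"change_vbucket_filter:\d+\]Going to change replication from '([^']+)' to have "
    r"\[([^\]]*)\] \(\[([^\]]*)\], \[([^\]]*)\]\)")

def _ints(csv):
    return [int(x) for x in csv.split(",") if x.strip()]

def filter_changes(log_text):
    """Yield (source_node, full_filter, added, removed) per logged filter change."""
    for node, full, added, removed in CHANGE_RE.findall(log_text):
        yield node, _ints(full), _ints(added), _ints(removed)

# Run over this excerpt it yields deltas of [347], then [349], [351] and [348],
# each arriving shortly after a matching "Changed vbucket N state to replica" entry.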
[ns_server:debug,2014-08-19T16:51:40.092,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29098.0>,{#Ref<0.0.1.69146>,<0.29102.0>}} [error_logger:info,2014-08-19T16:51:40.092,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29102.0>}, {name, {new_child_id, [347,363,364,365,366,367,368,369,370,371,372, 373,374,375,376,377,378,379,380,381,382,383, 384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:51:40.092,ns_1@10.242.238.90:<0.29103.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 344 [rebalance:debug,2014-08-19T16:51:40.094,ns_1@10.242.238.90:<0.29103.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:40.094,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29103.0> (ok) [ns_server:debug,2014-08-19T16:51:40.098,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:40.100,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.100,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1403 us [ns_server:debug,2014-08-19T16:51:40.100,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.101,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{347, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:40.109,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377, 378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426]}, 
{name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:40.109,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29107.0> [rebalance:debug,2014-08-19T16:51:40.130,ns_1@10.242.238.90:<0.29108.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 346 [rebalance:debug,2014-08-19T16:51:40.131,ns_1@10.242.238.90:<0.29108.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:40.131,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29108.0> (ok) [rebalance:debug,2014-08-19T16:51:40.132,ns_1@10.242.238.90:<0.28379.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:40.133,ns_1@10.242.238.90:<0.28379.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.133,ns_1@10.242.238.90:<0.29111.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.133,ns_1@10.242.238.90:<0.29111.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.133,ns_1@10.242.238.90:<0.28379.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:40.137,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 349 state to replica [ns_server:info,2014-08-19T16:51:40.137,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,349,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379, 380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426] ([349], []) [ns_server:debug,2014-08-19T16:51:40.140,ns_1@10.242.238.90:<0.29112.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,349,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381,382, 383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.69341>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,349,363,364,365,366,367,368,369,370,371,372,373,374,375, 376,377,378,379,380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.141,ns_1@10.242.238.90:<0.29112.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29102.0> [ns_server:info,2014-08-19T16:51:40.141,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:40.157,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter 
on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {349,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.158,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:40.158,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.158,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:40.158,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.158,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.158,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.158,ns_1@10.242.238.90:<0.29114.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.158,ns_1@10.242.238.90:<0.29114.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.159,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:40.159,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.159,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.159,ns_1@10.242.238.90:<0.29102.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:40.159,ns_1@10.242.238.90:<0.29112.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29102.0> [ns_server:debug,2014-08-19T16:51:40.159,ns_1@10.242.238.90:<0.29112.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.159,ns_1@10.242.238.90:<0.29116.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.159,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29102.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29107.0>,<<"cut off">>,<<"cut off">>,[],199,false,false,0, {1408,452700,158281}, completed, {<0.29112.0>,#Ref<0.0.1.69354>}, <<"replication_ns_1@10.242.238.90">>,<0.29102.0>, {had_backfill,false,undefined,[]}, completed,false}. [error_logger:info,2014-08-19T16:51:40.160,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29116.0>}, {name, {new_child_id, [347,349,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381,382, 383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,349,363,364,365,366,367,368,369,370, 371,372,373,374,375,376,377,378,379,380, 381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:40.160,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29112.0>,{#Ref<0.0.1.69343>,<0.29116.0>}} [rebalance:debug,2014-08-19T16:51:40.164,ns_1@10.242.238.90:<0.28329.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:40.164,ns_1@10.242.238.90:<0.28329.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.164,ns_1@10.242.238.90:<0.29117.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.164,ns_1@10.242.238.90:<0.29117.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.164,ns_1@10.242.238.90:<0.28329.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
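The ns_config_log "config change: buckets ->" entries interleaved with these filter changes carry only the slice of the vbucket map that moved; a tuple such as {347, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']} reads naturally as vbucket 347 moving from a chain with .88 active and no replica to one with .89 active and this node (.90) as the new replica, which lines up with the tap filter changes above. A small sketch for pulling those deltas out, with the {VBucket, OldChain, NewChain} reading taken as an assumption based on the surrounding activity rather than a documented format (pattern and names are illustrative):

import re

# Illustrative: pull the per-vbucket deltas out of a "config change: buckets ->"
# entry. Reading each tuple as {VBucket, OldChain, NewChain} is an assumption.
MAP_DELTA_RE = re.compile(r"\{(\d+),\s*\[([^\]]*)\],\s*\[([^\]]*)\]\}")

def map_deltas(config_change_text):
    """Yield (vbucket, old_chain, new_chain) with chain members as plain strings."""
    def chain(raw):
        return [node.strip(" '") for node in raw.split(",") if node.strip()]
    for vb, old_raw, new_raw in MAP_DELTA_RE.findall(config_change_text):
        yield int(vb), chain(old_raw), chain(new_raw)

# For the change logged above this gives
# (347, ['ns_1@10.242.238.88', 'undefined'], ['ns_1@10.242.238.89', 'ns_1@10.242.238.90']).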
[ns_server:debug,2014-08-19T16:51:40.165,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:40.168,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.168,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3323 us [ns_server:debug,2014-08-19T16:51:40.169,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.169,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{349, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:40.176,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 351 state to replica [ns_server:debug,2014-08-19T16:51:40.176,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,349,363,364,365,366,367,368,369,370,371,372,373,374,375,376, 377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424, 425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:40.176,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29119.0> [ns_server:info,2014-08-19T16:51:40.176,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,349,351,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378, 379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426] ([351], []) [ns_server:debug,2014-08-19T16:51:40.177,ns_1@10.242.238.90:<0.29120.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,349,351,363,364,365,366,367,368,369,370, 371,372,373,374,375,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425, 426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.69499>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,349,351,363,364,365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, 
{suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.178,ns_1@10.242.238.90:<0.29120.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29116.0> [ns_server:info,2014-08-19T16:51:40.178,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:40.192,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {349,1}, {351,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.193,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:40.193,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.193,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:40.193,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.193,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.193,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.193,ns_1@10.242.238.90:<0.29122.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.194,ns_1@10.242.238.90:<0.29122.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.194,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:40.194,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.194,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.194,ns_1@10.242.238.90:<0.29116.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:51:40.194,ns_1@10.242.238.90:<0.29120.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29116.0> [ns_server:debug,2014-08-19T16:51:40.194,ns_1@10.242.238.90:<0.29120.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.195,ns_1@10.242.238.90:<0.29124.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.195,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29116.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29119.0>,<<"cut off">>,<<"cut off">>,[],202,false,false,0, {1408,452700,193476}, completed, {<0.29120.0>,#Ref<0.0.1.69512>}, <<"replication_ns_1@10.242.238.90">>,<0.29116.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:40.195,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29120.0>,{#Ref<0.0.1.69501>,<0.29124.0>}} [error_logger:info,2014-08-19T16:51:40.195,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29124.0>}, {name, {new_child_id, [347,349,351,363,364,365,366,367,368,369,370, 371,372,373,374,375,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425, 426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,349,351,363,364,365,366,367,368,369, 370,371,372,373,374,375,376,377,378,379, 380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:40.199,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:40.202,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.202,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3036 us [ns_server:debug,2014-08-19T16:51:40.203,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.203,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{351, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, 
{num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:40.210,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,349,351,363,364,365,366,367,368,369,370,371,372,373,374,375, 376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:40.210,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29126.0> [rebalance:debug,2014-08-19T16:51:40.282,ns_1@10.242.238.90:<0.28414.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:40.282,ns_1@10.242.238.90:<0.28414.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.283,ns_1@10.242.238.90:<0.29127.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.283,ns_1@10.242.238.90:<0.29127.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.283,ns_1@10.242.238.90:<0.28414.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:40.286,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 348 state to replica [ns_server:info,2014-08-19T16:51:40.287,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,348,349,351,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377, 378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426] ([348], []) [ns_server:debug,2014-08-19T16:51:40.288,ns_1@10.242.238.90:<0.29128.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,348,349,351,363,364,365,366,367,368,369, 370,371,372,373,374,375,376,377,378,379,380, 381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424, 425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.69659>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,348,349,351,363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.288,ns_1@10.242.238.90:<0.29128.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29124.0> [ns_server:info,2014-08-19T16:51:40.289,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream 
`replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:51:40.289,ns_1@10.242.238.90:<0.28293.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:40.289,ns_1@10.242.238.90:<0.28293.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.289,ns_1@10.242.238.90:<0.29130.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.289,ns_1@10.242.238.90:<0.29130.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.290,ns_1@10.242.238.90:<0.28293.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:40.307,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {348,1}, {349,1}, {351,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.308,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:40.308,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.308,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:40.308,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.308,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.309,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.309,ns_1@10.242.238.90:<0.29131.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.309,ns_1@10.242.238.90:<0.29131.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.309,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
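The tap_replication_manager entries above print the full target vbucket list followed by a pair like ([348], []), which reads as the vbuckets being added and removed relative to the previous filter. A minimal sketch of that diff over two sorted vbucket lists, using plain ordsets; the module and function names are illustrative, not the ones ns_server uses:

-module(vb_filter_diff).
-export([diff/2]).

%% diff(OldVBuckets, NewVBuckets) -> {Added, Removed}
%% Both arguments are sorted, duplicate-free lists of vbucket ids,
%% matching the lists printed in the log entries above.
diff(OldVBs, NewVBs) ->
    Old = ordsets:from_list(OldVBs),
    New = ordsets:from_list(NewVBs),
    {ordsets:subtract(New, Old), ordsets:subtract(Old, New)}.

For example, vb_filter_diff:diff([347,349,351], [347,348,349,351]) returns {[348],[]}, the same shape as the ([348], []) annotation logged when vbucket 348 joins the replication stream.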
[ns_server:debug,2014-08-19T16:51:40.309,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.309,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.309,ns_1@10.242.238.90:<0.29124.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:40.309,ns_1@10.242.238.90:<0.29128.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29124.0> [ns_server:debug,2014-08-19T16:51:40.310,ns_1@10.242.238.90:<0.29128.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.310,ns_1@10.242.238.90:<0.29133.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.310,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29124.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29126.0>,<<"cut off">>,<<"cut off">>,[],205,false,false,0, {1408,452700,308587}, completed, {<0.29128.0>,#Ref<0.0.1.69672>}, <<"replication_ns_1@10.242.238.90">>,<0.29124.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:40.310,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29128.0>,{#Ref<0.0.1.69661>,<0.29133.0>}} [error_logger:info,2014-08-19T16:51:40.310,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29133.0>}, {name, {new_child_id, [347,348,349,351,363,364,365,366,367,368,369, 370,371,372,373,374,375,376,377,378,379,380, 381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424, 425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,348,349,351,363,364,365,366,367,368, 369,370,371,372,373,374,375,376,377,378, 379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:40.316,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:40.325,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.325,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 9414 us [ns_server:debug,2014-08-19T16:51:40.325,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
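The ebucketmigrator state dumps above embed three-element timestamps such as {1408,452700,308587}, which look like erlang:now()-style {MegaSecs, Seconds, MicroSecs} tuples, and ns_config_rep reports its sync times in microseconds. Assuming that timestamp shape, a small sketch of turning those tuples into elapsed time and wall-clock seconds with the standard timer:now_diff/2:

-module(ts_util).
-export([elapsed_us/2, to_unix_seconds/1]).

%% Microseconds elapsed between two {MegaSecs, Secs, MicroSecs} tuples,
%% e.g. the timestamps embedded in the ebucketmigrator state dumps.
elapsed_us(Earlier, Later) ->
    timer:now_diff(Later, Earlier).

%% Collapse a {MegaSecs, Secs, MicroSecs} tuple to Unix seconds.
to_unix_seconds({Mega, Secs, _Micro}) ->
    Mega * 1000000 + Secs.

For instance, ts_util:elapsed_us({1408,452700,193476}, {1408,452700,308587}) gives 115111, i.e. roughly 115 ms between two of the state snapshots logged above.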
[ns_server:debug,2014-08-19T16:51:40.326,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{348, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:40.328,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,348,349,351,363,364,365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:info,2014-08-19T16:51:40.328,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 353 state to replica [rebalance:debug,2014-08-19T16:51:40.328,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29135.0> [ns_server:info,2014-08-19T16:51:40.328,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,348,349,351,353,363,364,365,366,367,368,369,370,371,372,373,374,375,376, 377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425,426] ([353], []) [ns_server:debug,2014-08-19T16:51:40.329,ns_1@10.242.238.90:<0.29136.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,348,349,351,353,363,364,365,366,367,368, 369,370,371,372,373,374,375,376,377,378,379, 380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423, 424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.69807>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,348,349,351,353,363,364,365,366,367,368,369,370,371,372, 373,374,375,376,377,378,379,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.329,ns_1@10.242.238.90:<0.29136.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29133.0> [ns_server:info,2014-08-19T16:51:40.330,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:40.343,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {348,1}, {349,1}, {351,1}, {353,1}, {363,1}, {364,1}, 
{365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.344,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:40.344,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.345,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:40.345,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.345,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.345,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.345,ns_1@10.242.238.90:<0.29138.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.345,ns_1@10.242.238.90:<0.29138.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.345,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:40.345,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.346,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.346,ns_1@10.242.238.90:<0.29133.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:40.346,ns_1@10.242.238.90:<0.29136.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29133.0> [ns_server:debug,2014-08-19T16:51:40.346,ns_1@10.242.238.90:<0.29136.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.346,ns_1@10.242.238.90:<0.29140.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.346,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29133.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29135.0>,<<"cut off">>,<<"cut off">>,[],208,false,false,0, {1408,452700,344784}, completed, {<0.29136.0>,#Ref<0.0.1.69820>}, <<"replication_ns_1@10.242.238.90">>,<0.29133.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:40.347,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29136.0>,{#Ref<0.0.1.69809>,<0.29140.0>}} [error_logger:info,2014-08-19T16:51:40.347,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29140.0>}, {name, {new_child_id, [347,348,349,351,353,363,364,365,366,367,368, 369,370,371,372,373,374,375,376,377,378,379, 380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423, 424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,348,349,351,353,363,364,365,366,367, 368,369,370,371,372,373,374,375,376,377, 378,379,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:40.352,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:40.355,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.357,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{353, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:40.357,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4823 us [ns_server:debug,2014-08-19T16:51:40.358,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.362,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,348,349,351,353,363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:40.362,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29142.0> [rebalance:debug,2014-08-19T16:51:40.463,ns_1@10.242.238.90:<0.28242.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
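The "config change: buckets ->" entries carry a sparse map list whose elements appear to be {VBucket, OldChain, NewChain} triples; in the entry above, vbucket 353's chain moves from ['ns_1@10.242.238.88',undefined] to ['ns_1@10.242.238.89','ns_1@10.242.238.90'], i.e. .89 becomes the active copy and .90 (this node) the replica. A hedged sketch of reading such moves out of that logged shape (a reader for the printed term only, not the ns_config API):

-module(bucket_map_delta).
-export([moves/1]).

%% moves(MapEntries) -> [{VBucket, OldActive, NewActive, NewReplicas}]
%% MapEntries is the list found under the map key of the logged bucket
%% config, e.g. [{353, ['ns_1@...88', undefined], ['ns_1@...89', 'ns_1@...90']}].
moves(MapEntries) ->
    [{VB, OldActive, NewActive, NewReplicas}
     || {VB, [OldActive | _OldReplicas], [NewActive | NewReplicas]} <- MapEntries].

Feeding it the single-entry map above yields [{353, 'ns_1@10.242.238.88', 'ns_1@10.242.238.89', ['ns_1@10.242.238.90']}].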
[ns_server:debug,2014-08-19T16:51:40.463,ns_1@10.242.238.90:<0.28242.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.463,ns_1@10.242.238.90:<0.29143.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.463,ns_1@10.242.238.90:<0.29143.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.463,ns_1@10.242.238.90:<0.28242.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:40.467,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 355 state to replica [ns_server:info,2014-08-19T16:51:40.467,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,348,349,351,353,355,363,364,365,366,367,368,369,370,371,372,373,374,375, 376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424,425,426] ([355], []) [ns_server:debug,2014-08-19T16:51:40.468,ns_1@10.242.238.90:<0.29144.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,348,349,351,353,355,363,364,365,366,367, 368,369,370,371,372,373,374,375,376,377,378, 379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.69970>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,348,349,351,353,355,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.468,ns_1@10.242.238.90:<0.29144.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29140.0> [ns_server:info,2014-08-19T16:51:40.469,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:51:40.472,ns_1@10.242.238.90:<0.28354.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:40.472,ns_1@10.242.238.90:<0.28354.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.472,ns_1@10.242.238.90:<0.29146.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.473,ns_1@10.242.238.90:<0.29146.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.473,ns_1@10.242.238.90:<0.28354.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
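Every shutdown above follows the same handshake: a helper process sends an opaque marker downstream, and the migrator refuses to finish until the matching close ack comes back, so nothing queued behind the marker can be lost. A minimal, purely illustrative sketch of that send-marker-then-wait pattern between two Erlang processes (no memcached/TAP traffic here, just the ordering guarantee):

-module(ack_handshake).
-export([demo/0, downstream/1]).

%% Downstream peer: acks every opaque marker it receives.
downstream(Parent) ->
    receive
        {opaque, Ref} ->
            Parent ! {ack, Ref},
            downstream(Parent)
    end.

%% Sender side: emit a marker and block until its ack arrives, mirroring
%% "Going to wait for reception of opaque message ack" / "Got close ack!".
confirm_downstream(Peer) ->
    Ref = make_ref(),
    Peer ! {opaque, Ref},
    receive
        {ack, Ref} -> ok
    after 5000 -> {error, timeout}
    end.

demo() ->
    Self = self(),
    Peer = spawn(fun() -> downstream(Self) end),
    ok = confirm_downstream(Peer),
    exit(Peer, kill),
    ok.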
[ns_server:info,2014-08-19T16:51:40.487,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {348,1}, {349,1}, {351,1}, {353,1}, {355,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.488,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:40.488,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.488,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:40.489,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.489,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.489,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.489,ns_1@10.242.238.90:<0.29147.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.489,ns_1@10.242.238.90:<0.29147.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.489,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:40.489,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.489,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.489,ns_1@10.242.238.90:<0.29140.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:40.490,ns_1@10.242.238.90:<0.29144.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29140.0> [ns_server:debug,2014-08-19T16:51:40.490,ns_1@10.242.238.90:<0.29144.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.490,ns_1@10.242.238.90:<0.29149.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.490,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29140.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29142.0>,<<"cut off">>,<<"cut off">>,[],211,false,false,0, {1408,452700,488736}, completed, {<0.29144.0>,#Ref<0.0.1.69983>}, <<"replication_ns_1@10.242.238.90">>,<0.29140.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:40.491,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29144.0>,{#Ref<0.0.1.69972>,<0.29149.0>}} [error_logger:info,2014-08-19T16:51:40.491,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29149.0>}, {name, {new_child_id, [347,348,349,351,353,355,363,364,365,366,367, 368,369,370,371,372,373,374,375,376,377,378, 379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,348,349,351,353,355,363,364,365,366, 367,368,369,370,371,372,373,374,375,376, 377,378,379,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:40.495,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:40.499,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.499,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3808 us [ns_server:debug,2014-08-19T16:51:40.499,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.500,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{355, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:40.503,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 350 state to replica [ns_server:info,2014-08-19T16:51:40.504,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 
'ns_1@10.242.238.89' to have [347,348,349,350,351,353,355,363,364,365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([350], []) [ns_server:debug,2014-08-19T16:51:40.505,ns_1@10.242.238.90:<0.29151.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,348,349,350,351,353,355,363,364,365,366, 367,368,369,370,371,372,373,374,375,376,377, 378,379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.70109>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,348,349,350,351,353,355,363,364,365,366,367,368,369,370, 371,372,373,374,375,376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.505,ns_1@10.242.238.90:<0.29151.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29149.0> [ns_server:debug,2014-08-19T16:51:40.507,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,348,349,351,353,355,363,364,365,366,367,368,369,370,371,372, 373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:40.507,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29153.0> [ns_server:info,2014-08-19T16:51:40.507,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:40.521,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {353,1}, {355,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.522,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:40.522,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.522,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:40.522,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.523,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.523,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.523,ns_1@10.242.238.90:<0.29154.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.523,ns_1@10.242.238.90:<0.29154.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.523,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:40.523,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.523,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.523,ns_1@10.242.238.90:<0.29149.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:40.523,ns_1@10.242.238.90:<0.29151.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29149.0> [ns_server:debug,2014-08-19T16:51:40.524,ns_1@10.242.238.90:<0.29151.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.524,ns_1@10.242.238.90:<0.29156.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.524,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29149.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29153.0>,<<"cut off">>,<<"cut off">>,[],214,false,false,0, {1408,452700,522757}, completed, {<0.29151.0>,#Ref<0.0.1.70122>}, <<"replication_ns_1@10.242.238.90">>,<0.29149.0>, {had_backfill,false,undefined,[]}, completed,false}. 
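Each "Got old ebucketmigrator state from <pid>" dump above shows the replacement process inheriting the old one's #Port handles (the same #Port<0.16368>..#Port<0.16373> values recur across successive dumps), so the underlying connections survive the swap. The log does not show the mechanism, but in Erlang ownership of a TCP socket is normally handed over with gen_tcp:controlling_process/2; a rough, illustrative sketch, not the actual ebucketmigrator_srv code:

-module(sock_handoff).
-export([handoff/2]).

%% Transfer an open TCP socket from the calling process to NewOwner and
%% tell it which socket it now owns, so the connection outlives the old
%% owner. Illustrative only; the real state transfer is internal to
%% ebucketmigrator_srv.
handoff(Socket, NewOwner) ->
    ok = gen_tcp:controlling_process(Socket, NewOwner),
    NewOwner ! {take_socket, Socket},
    ok.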
[ns_server:debug,2014-08-19T16:51:40.524,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29151.0>,{#Ref<0.0.1.70111>,<0.29156.0>}} [error_logger:info,2014-08-19T16:51:40.524,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29156.0>}, {name, {new_child_id, [347,348,349,350,351,353,355,363,364,365,366, 367,368,369,370,371,372,373,374,375,376,377, 378,379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,348,349,350,351,353,355,363,364,365, 366,367,368,369,370,371,372,373,374,375, 376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:40.532,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:40.532,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.532,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 227 us [ns_server:debug,2014-08-19T16:51:40.533,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.534,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{350, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:40.540,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,348,349,350,351,353,355,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:40.540,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29158.0> [rebalance:debug,2014-08-19T16:51:40.586,ns_1@10.242.238.90:<0.28318.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
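The progress reports above show 'ns_vbm_new_sup-default' starting each replacement replicator as a temporary worker whose child id is {new_child_id, VBuckets, SourceNode} with a 60000 ms shutdown budget. A self-contained sketch of that supervision shape in plain OTP terms, with stand-in module and worker names (the real child runs ebucketmigrator_srv:start_link with the arguments shown in the report):

-module(vbm_sup_sketch).
-behaviour(supervisor).
-export([demo/0, start_link/0, init/1,
         add_replicator/3, start_worker/3, worker_loop/1]).

%% A supervisor with no static children; replication workers are added
%% dynamically, the way 'ns_vbm_new_sup-default' does above.
start_link() ->
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).

init([]) ->
    {ok, {{one_for_one, 3, 10}, []}}.

%% Stand-in for ebucketmigrator_srv:start_link: a process that just
%% holds on to the endpoints and vbucket list it was given.
start_worker(SrcNode, DstNode, VBuckets) ->
    Pid = proc_lib:spawn_link(?MODULE, worker_loop, [{SrcNode, DstNode, VBuckets}]),
    {ok, Pid}.

worker_loop(State) ->
    receive stop -> {stopped, State} end.

%% The child id encodes the vbucket set and the source node, mirroring the
%% {new_child_id, [...], 'ns_1@10.242.238.89'} ids in the progress reports;
%% restart_type temporary and the 60000 ms shutdown match the logged fields.
add_replicator(Sup, SrcNode, VBuckets) ->
    ChildSpec = {{new_child_id, VBuckets, SrcNode},
                 {?MODULE, start_worker, [SrcNode, node(), VBuckets]},
                 temporary,
                 60000,
                 worker,
                 [?MODULE]},
    supervisor:start_child(Sup, ChildSpec).

demo() ->
    {ok, Sup} = start_link(),
    add_replicator(Sup, 'ns_1@10.242.238.89', [347, 348, 349]).

demo() starts the supervisor and registers one child keyed by a small vbucket list; supervisor:which_children/1 on the returned pid would list it under that composite id.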
[ns_server:debug,2014-08-19T16:51:40.586,ns_1@10.242.238.90:<0.28318.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.587,ns_1@10.242.238.90:<0.29159.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.587,ns_1@10.242.238.90:<0.29159.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.587,ns_1@10.242.238.90:<0.28318.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:40.591,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 352 state to replica [ns_server:info,2014-08-19T16:51:40.592,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,348,349,350,351,352,353,355,363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([352], []) [ns_server:debug,2014-08-19T16:51:40.593,ns_1@10.242.238.90:<0.29160.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,348,349,350,351,352,353,355,363,364,365, 366,367,368,369,370,371,372,373,374,375,376, 377,378,379,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.70280>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,348,349,350,351,352,353,355,363,364,365,366,367,368,369, 370,371,372,373,374,375,376,377,378,379,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.593,ns_1@10.242.238.90:<0.29160.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29156.0> [ns_server:info,2014-08-19T16:51:40.594,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:51:40.598,ns_1@10.242.238.90:<0.28199.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:40.598,ns_1@10.242.238.90:<0.28199.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.598,ns_1@10.242.238.90:<0.29162.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.598,ns_1@10.242.238.90:<0.29162.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.598,ns_1@10.242.238.90:<0.28199.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:51:40.612,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {355,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.613,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:40.614,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.614,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:40.614,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.614,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.614,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.614,ns_1@10.242.238.90:<0.29163.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.614,ns_1@10.242.238.90:<0.29163.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.615,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:40.615,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.615,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.615,ns_1@10.242.238.90:<0.29156.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:40.615,ns_1@10.242.238.90:<0.29160.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29156.0> [ns_server:debug,2014-08-19T16:51:40.615,ns_1@10.242.238.90:<0.29160.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.615,ns_1@10.242.238.90:<0.29165.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.616,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29156.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29158.0>,<<"cut off">>,<<"cut off">>,[],217,false,false,0, {1408,452700,614168}, completed, {<0.29160.0>,#Ref<0.0.1.70293>}, <<"replication_ns_1@10.242.238.90">>,<0.29156.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:40.616,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29160.0>,{#Ref<0.0.1.70282>,<0.29165.0>}} [error_logger:info,2014-08-19T16:51:40.616,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29165.0>}, {name, {new_child_id, [347,348,349,350,351,352,353,355,363,364,365, 366,367,368,369,370,371,372,373,374,375,376, 377,378,379,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,348,349,350,351,352,353,355,363,364, 365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424, 425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:40.621,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:40.626,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5046 us [ns_server:debug,2014-08-19T16:51:40.627,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.628,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.629,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{352, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:40.632,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,348,349,350,351,352,353,355,363,364,365,366,367,368,369,370, 371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386, 
387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:40.632,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29167.0> [ns_server:info,2014-08-19T16:51:40.634,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 357 state to replica [ns_server:info,2014-08-19T16:51:40.634,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,348,349,350,351,352,353,355,357,363,364,365,366,367,368,369,370,371,372, 373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([357], []) [ns_server:debug,2014-08-19T16:51:40.636,ns_1@10.242.238.90:<0.29168.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,348,349,350,351,352,353,355,357,363,364, 365,366,367,368,369,370,371,372,373,374,375, 376,377,378,379,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.70430>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,348,349,350,351,352,353,355,357,363,364,365,366,367,368, 369,370,371,372,373,374,375,376,377,378,379,380,381,382,383, 384,385,386,387,388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.636,ns_1@10.242.238.90:<0.29168.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29165.0> [ns_server:info,2014-08-19T16:51:40.636,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:51:40.637,ns_1@10.242.238.90:<0.28163.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:40.637,ns_1@10.242.238.90:<0.28163.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.637,ns_1@10.242.238.90:<0.29170.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.638,ns_1@10.242.238.90:<0.29170.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.638,ns_1@10.242.238.90:<0.28163.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:51:40.649,ns_1@10.242.238.90:<0.28267.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:40.649,ns_1@10.242.238.90:<0.28267.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.649,ns_1@10.242.238.90:<0.29171.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.649,ns_1@10.242.238.90:<0.29171.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.650,ns_1@10.242.238.90:<0.28267.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:40.650,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {355,1}, {357,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.651,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:40.651,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.652,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:40.652,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.652,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.652,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.652,ns_1@10.242.238.90:<0.29172.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.652,ns_1@10.242.238.90:<0.29172.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.652,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
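By this point the log has recorded a steady stream of "Changed vbucket N state to replica" messages (348, 353, 355, 350, 352, 357 so far, with 359 following below). A small sketch for pulling those ids out of a saved copy of this log; the module name and the idea of scanning a flat file are assumptions for the example, not a ns_server facility:

-module(log_scan).
-export([replica_vbuckets/1]).

%% replica_vbuckets(LogFile) -> [VBucketId]
%% Collects every vbucket id this log reports as switched to replica
%% state, in the order the messages appear.
replica_vbuckets(LogFile) ->
    {ok, Bin} = file:read_file(LogFile),
    {ok, RE} = re:compile(<<"Changed vbucket ([0-9]+) state to replica">>),
    case re:run(Bin, RE, [global, {capture, all_but_first, binary}]) of
        {match, Matches} -> [binary_to_integer(VB) || [VB] <- Matches];
        nomatch -> []
    end.

Run against a file containing this excerpt it would return [348,353,355,350,352,357,359], which is a quick way to track how many replica vbuckets this node has picked up during the rebalance.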
[ns_server:debug,2014-08-19T16:51:40.652,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.653,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.653,ns_1@10.242.238.90:<0.29165.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:40.653,ns_1@10.242.238.90:<0.29168.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29165.0> [ns_server:debug,2014-08-19T16:51:40.653,ns_1@10.242.238.90:<0.29168.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.653,ns_1@10.242.238.90:<0.29174.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.653,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29165.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29167.0>,<<"cut off">>,<<"cut off">>,[],220,false,false,0, {1408,452700,651822}, completed, {<0.29168.0>,#Ref<0.0.1.70443>}, <<"replication_ns_1@10.242.238.90">>,<0.29165.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:40.654,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29168.0>,{#Ref<0.0.1.70432>,<0.29174.0>}} [error_logger:info,2014-08-19T16:51:40.654,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29174.0>}, {name, {new_child_id, [347,348,349,350,351,352,353,355,357,363,364, 365,366,367,368,369,370,371,372,373,374,375, 376,377,378,379,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,348,349,350,351,352,353,355,357,363, 364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383, 384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:40.658,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:40.661,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.661,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2753 us [ns_server:debug,2014-08-19T16:51:40.662,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
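
Interleaved with the migrator handoffs, ns_config_rep keeps answering full synchronization requests from 'ns_1@10.242.238.88' and reports how long each one took. A sketch (illustrative helper name) for pulling those durations out of the log:

    import re

    SYNC_RE = re.compile(r"Fully synchronized config in (\d+) us")

    def sync_durations_us(log_text):
        """All "Fully synchronized config in N us" durations, in log order."""
        return [int(us) for us in SYNC_RE.findall(log_text)]

    # Values seen in this section: 2753, 8954, 4385, 3961, 2245, 5403, 3618 us.
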
[ns_server:debug,2014-08-19T16:51:40.662,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{357, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:40.664,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 359 state to replica [ns_server:info,2014-08-19T16:51:40.665,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,348,349,350,351,352,353,355,357,359,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([359], []) [ns_server:debug,2014-08-19T16:51:40.667,ns_1@10.242.238.90:<0.29175.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,348,349,350,351,352,353,355,357,359,363, 364,365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.70600>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,348,349,350,351,352,353,355,357,359,363,364,365,366,367, 368,369,370,371,372,373,374,375,376,377,378,379,380,381,382, 383,384,385,386,387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.667,ns_1@10.242.238.90:<0.29175.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29174.0> [ns_server:debug,2014-08-19T16:51:40.670,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,348,349,350,351,352,353,355,357,363,364,365,366,367,368,369, 370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:40.670,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29178.0> [ns_server:info,2014-08-19T16:51:40.671,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:40.687,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream 
`replication_ns_1@10.242.238.90`: [{347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {355,1}, {357,1}, {359,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.688,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:40.688,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.688,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:40.688,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.689,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.689,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.689,ns_1@10.242.238.90:<0.29179.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.689,ns_1@10.242.238.90:<0.29179.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.689,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:40.691,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.691,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.691,ns_1@10.242.238.90:<0.29174.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:40.691,ns_1@10.242.238.90:<0.29175.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29174.0> [ns_server:debug,2014-08-19T16:51:40.692,ns_1@10.242.238.90:<0.29175.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.692,ns_1@10.242.238.90:<0.29181.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.692,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29174.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29178.0>,<<"cut off">>,<<"cut off">>,[],223,false,false,0, {1408,452700,688645}, completed, {<0.29175.0>,#Ref<0.0.1.70614>}, <<"replication_ns_1@10.242.238.90">>,<0.29174.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:40.692,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29175.0>,{#Ref<0.0.1.70602>,<0.29181.0>}} [error_logger:info,2014-08-19T16:51:40.692,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29181.0>}, {name, {new_child_id, [347,348,349,350,351,352,353,355,357,359,363, 364,365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,348,349,350,351,352,353,355,357,359, 363,364,365,366,367,368,369,370,371,372, 373,374,375,376,377,378,379,380,381,382, 383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:40.697,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:40.706,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.706,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8954 us [ns_server:debug,2014-08-19T16:51:40.707,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.708,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{359, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:40.710,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 354 state to replica [ns_server:info,2014-08-19T16:51:40.710,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to 
change replication from 'ns_1@10.242.238.89' to have [347,348,349,350,351,352,353,354,355,357,359,363,364,365,366,367,368,369,370, 371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([354], []) [ns_server:debug,2014-08-19T16:51:40.711,ns_1@10.242.238.90:<0.29183.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,348,349,350,351,352,353,354,355,357,359, 363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.70745>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,348,349,350,351,352,353,354,355,357,359,363,364,365,366, 367,368,369,370,371,372,373,374,375,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.711,ns_1@10.242.238.90:<0.29183.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29181.0> [ns_server:debug,2014-08-19T16:51:40.711,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,348,349,350,351,352,353,355,357,359,363,364,365,366,367,368, 369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:40.712,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29185.0> [ns_server:info,2014-08-19T16:51:40.712,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:40.725,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {354,1}, {355,1}, {357,1}, {359,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.726,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
[ns_server:info,2014-08-19T16:51:40.726,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.727,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:40.727,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.727,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.727,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.727,ns_1@10.242.238.90:<0.29186.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.727,ns_1@10.242.238.90:<0.29186.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.727,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:40.727,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.727,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.727,ns_1@10.242.238.90:<0.29181.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:40.727,ns_1@10.242.238.90:<0.29183.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29181.0> [ns_server:debug,2014-08-19T16:51:40.728,ns_1@10.242.238.90:<0.29183.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.728,ns_1@10.242.238.90:<0.29188.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.728,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29181.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29185.0>,<<"cut off">>,<<"cut off">>,[],226,false,false,0, {1408,452700,726779}, completed, {<0.29183.0>,#Ref<0.0.1.70759>}, <<"replication_ns_1@10.242.238.90">>,<0.29181.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:40.729,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29183.0>,{#Ref<0.0.1.70747>,<0.29188.0>}} [error_logger:info,2014-08-19T16:51:40.729,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29188.0>}, {name, {new_child_id, [347,348,349,350,351,352,353,354,355,357,359, 363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,348,349,350,351,352,353,354,355,357, 359,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:40.733,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:40.737,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.737,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4385 us [ns_server:debug,2014-08-19T16:51:40.738,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.738,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{354, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:40.743,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,348,349,350,351,352,353,354,355,357,359,363,364,365,366,367, 368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383, 384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:40.744,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29190.0> [rebalance:debug,2014-08-19T16:51:40.847,ns_1@10.242.238.90:<0.28113.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
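
Each takeover also produces an ns_config_log bucket diff ("config change: buckets -> ...") whose map entries have the shape {VBucket, [ChainA], [ChainB]}, e.g. {354, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']} above. Reading ChainA as the previous chain and ChainB as the new one is an assumption about the diff format, not something the log states. A sketch (illustrative names) for extracting those triples from a single such entry:

    import re

    MAP_DIFF_RE = re.compile(r"\{(\d+),\s*\[([^\]]*)\],\s*\[([^\]]*)\]\}")

    def parse_chain(text):
        # "'ns_1@10.242.238.88',undefined" -> ['ns_1@10.242.238.88', None]
        return [None if tok.strip() == "undefined" else tok.strip().strip("'")
                for tok in text.split(",") if tok.strip()]

    def map_changes(config_change_entry):
        """Yield (vbucket, chain_a, chain_b) from one such config-change entry."""
        for vb, a, b in MAP_DIFF_RE.findall(config_change_entry):
            yield int(vb), parse_chain(a), parse_chain(b)
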
[ns_server:debug,2014-08-19T16:51:40.847,ns_1@10.242.238.90:<0.28113.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.847,ns_1@10.242.238.90:<0.29191.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.847,ns_1@10.242.238.90:<0.29191.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.847,ns_1@10.242.238.90:<0.28113.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:40.851,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 361 state to replica [ns_server:info,2014-08-19T16:51:40.851,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,348,349,350,351,352,353,354,355,357,359,361,363,364,365,366,367,368,369, 370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426] ([361], []) [ns_server:debug,2014-08-19T16:51:40.852,ns_1@10.242.238.90:<0.29192.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,348,349,350,351,352,353,354,355,357,359, 361,363,364,365,366,367,368,369,370,371,372, 373,374,375,376,377,378,379,380,381,382,383, 384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.70916>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,348,349,350,351,352,353,354,355,357,359,361,363,364,365, 366,367,368,369,370,371,372,373,374,375,376,377,378,379,380, 381,382,383,384,385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.852,ns_1@10.242.238.90:<0.29192.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29188.0> [ns_server:info,2014-08-19T16:51:40.853,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:51:40.856,ns_1@10.242.238.90:<0.28210.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:40.856,ns_1@10.242.238.90:<0.28210.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.856,ns_1@10.242.238.90:<0.29194.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.856,ns_1@10.242.238.90:<0.29194.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.856,ns_1@10.242.238.90:<0.28210.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
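
The ns_memcached entries ("Changed vbucket V state to replica") give the order in which this node picks up its new replica vbuckets; across this section that order is 357, 359, 354, 361, 356, 358, 360. A sketch (illustrative name) for collecting them:

    import re

    STATE_RE = re.compile(r"Changed vbucket (\d+) state to (\w+)")

    def vbucket_state_changes(log_text):
        """[(vbucket, new_state), ...] in log order."""
        return [(int(vb), state) for vb, state in STATE_RE.findall(log_text)]
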
[ns_server:info,2014-08-19T16:51:40.872,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {354,1}, {355,1}, {357,1}, {359,1}, {361,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.873,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:40.873,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.874,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:40.874,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.874,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.874,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.874,ns_1@10.242.238.90:<0.29195.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.874,ns_1@10.242.238.90:<0.29195.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.874,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:40.874,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.875,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.875,ns_1@10.242.238.90:<0.29188.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:40.875,ns_1@10.242.238.90:<0.29192.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29188.0> [ns_server:debug,2014-08-19T16:51:40.875,ns_1@10.242.238.90:<0.29192.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.875,ns_1@10.242.238.90:<0.29197.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.875,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29188.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29190.0>,<<"cut off">>,<<"cut off">>,[],229,false,false,0, {1408,452700,873806}, completed, {<0.29192.0>,#Ref<0.0.1.70929>}, <<"replication_ns_1@10.242.238.90">>,<0.29188.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:40.876,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29192.0>,{#Ref<0.0.1.70918>,<0.29197.0>}} [error_logger:info,2014-08-19T16:51:40.876,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29197.0>}, {name, {new_child_id, [347,348,349,350,351,352,353,354,355,357,359, 361,363,364,365,366,367,368,369,370,371,372, 373,374,375,376,377,378,379,380,381,382,383, 384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,348,349,350,351,352,353,354,355,357, 359,361,363,364,365,366,367,368,369,370, 371,372,373,374,375,376,377,378,379,380, 381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:51:40.881,ns_1@10.242.238.90:<0.29198.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 599 [ns_server:debug,2014-08-19T16:51:40.882,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:51:40.883,ns_1@10.242.238.90:<0.29198.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:40.883,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29198.0> (ok) [ns_server:debug,2014-08-19T16:51:40.886,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.886,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3961 us [ns_server:debug,2014-08-19T16:51:40.886,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.887,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{361, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, 
{type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:40.890,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 356 state to replica [ns_server:info,2014-08-19T16:51:40.890,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,348,349,350,351,352,353,354,355,356,357,359,361,363,364,365,366,367,368, 369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426] ([356], []) [ns_server:debug,2014-08-19T16:51:40.891,ns_1@10.242.238.90:<0.29202.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,348,349,350,351,352,353,354,355,356,357, 359,361,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381,382, 383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.71071>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,348,349,350,351,352,353,354,355,356,357,359,361,363,364, 365,366,367,368,369,370,371,372,373,374,375,376,377,378,379, 380,381,382,383,384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420,421,422,423,424, 425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.892,ns_1@10.242.238.90:<0.29202.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29197.0> [ns_server:debug,2014-08-19T16:51:40.893,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,348,349,350,351,352,353,354,355,357,359,361,363,364,365,366, 367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382, 383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:40.893,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29204.0> [ns_server:info,2014-08-19T16:51:40.893,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:40.907,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {354,1}, {355,1}, {356,1}, {357,1}, {359,1}, {361,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, 
{395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.909,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:40.909,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.909,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:40.909,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.909,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.909,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.909,ns_1@10.242.238.90:<0.29205.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.910,ns_1@10.242.238.90:<0.29205.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.910,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:40.910,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.910,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.910,ns_1@10.242.238.90:<0.29197.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:40.910,ns_1@10.242.238.90:<0.29202.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29197.0> [ns_server:debug,2014-08-19T16:51:40.910,ns_1@10.242.238.90:<0.29202.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.911,ns_1@10.242.238.90:<0.29207.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.911,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29197.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29204.0>,<<"cut off">>,<<"cut off">>,[],232,false,false,0, {1408,452700,909356}, completed, {<0.29202.0>,#Ref<0.0.1.71084>}, <<"replication_ns_1@10.242.238.90">>,<0.29197.0>, {had_backfill,false,undefined,[]}, completed,false}. 
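
Every handoff above brackets a "Starting new-style vbucket filter change" entry and a "Successfully changed vbucket filter" entry from the same ebucketmigrator, so the gap between their timestamps is roughly how long each filter change took (15-20 ms in this section). A sketch that pairs them up, assuming the two messages strictly alternate as they do in this log (names illustrative):

    import re
    from datetime import datetime

    TS = r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+)"
    START_RE = re.compile(r"\[ns_server:\w+," + TS + r"[^\]]*\]Starting new-style vbucket filter change")
    DONE_RE = re.compile(r"\[ns_server:\w+," + TS + r"[^\]]*\]Successfully changed vbucket filter")

    def _ts(text):
        return datetime.strptime(text, "%Y-%m-%dT%H:%M:%S.%f")

    def filter_change_seconds(log_text):
        starts = [_ts(m.group(1)) for m in START_RE.finditer(log_text)]
        dones = [_ts(m.group(1)) for m in DONE_RE.finditer(log_text)]
        # Assumes each "Starting ..." is followed by exactly one "Successfully ...".
        return [(d - s).total_seconds() for s, d in zip(starts, dones)]
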
[ns_server:debug,2014-08-19T16:51:40.911,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29202.0>,{#Ref<0.0.1.71073>,<0.29207.0>}} [error_logger:info,2014-08-19T16:51:40.911,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29207.0>}, {name, {new_child_id, [347,348,349,350,351,352,353,354,355,356,357, 359,361,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381,382, 383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,348,349,350,351,352,353,354,355,356, 357,359,361,363,364,365,366,367,368,369, 370,371,372,373,374,375,376,377,378,379, 380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:51:40.913,ns_1@10.242.238.90:<0.28174.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:40.913,ns_1@10.242.238.90:<0.28174.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.914,ns_1@10.242.238.90:<0.29208.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.914,ns_1@10.242.238.90:<0.29208.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.914,ns_1@10.242.238.90:<0.28174.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:40.916,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:40.919,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.920,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2245 us [ns_server:debug,2014-08-19T16:51:40.920,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.920,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{356, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:40.922,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 358 state to replica [ns_server:info,2014-08-19T16:51:40.922,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,348,349,350,351,352,353,354,355,356,357,358,359,361,363,364,365,366,367, 368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424, 425,426] ([358], []) [ns_server:debug,2014-08-19T16:51:40.926,ns_1@10.242.238.90:<0.29210.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,348,349,350,351,352,353,354,355,356,357, 358,359,361,363,364,365,366,367,368,369,370, 371,372,373,374,375,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425, 426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.71222>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,348,349,350,351,352,353,354,355,356,357,358,359,361,363, 364,365,366,367,368,369,370,371,372,373,374,375,376,377,378, 379,380,381,382,383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:40.926,ns_1@10.242.238.90:<0.29210.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29207.0> [ns_server:debug,2014-08-19T16:51:40.929,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,348,349,350,351,352,353,354,355,356,357,359,361,363,364,365, 366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381, 
382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:40.930,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29212.0> [ns_server:info,2014-08-19T16:51:40.930,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:40.952,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {354,1}, {355,1}, {356,1}, {357,1}, {358,1}, {359,1}, {361,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:40.953,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:40.954,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:40.954,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:40.954,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:40.954,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:40.954,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:40.954,ns_1@10.242.238.90:<0.29213.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:40.954,ns_1@10.242.238.90:<0.29213.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:40.954,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:40.954,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:40.954,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:40.954,ns_1@10.242.238.90:<0.29207.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:51:40.955,ns_1@10.242.238.90:<0.29210.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29207.0> [ns_server:debug,2014-08-19T16:51:40.955,ns_1@10.242.238.90:<0.29210.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:40.955,ns_1@10.242.238.90:<0.29215.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:40.955,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29207.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29212.0>,<<"cut off">>,<<"cut off">>,[],235,false,false,0, {1408,452700,954013}, completed, {<0.29210.0>,#Ref<0.0.1.71236>}, <<"replication_ns_1@10.242.238.90">>,<0.29207.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:40.955,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29210.0>,{#Ref<0.0.1.71224>,<0.29215.0>}} [error_logger:info,2014-08-19T16:51:40.955,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29215.0>}, {name, {new_child_id, [347,348,349,350,351,352,353,354,355,356,357, 358,359,361,363,364,365,366,367,368,369,370, 371,372,373,374,375,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424,425, 426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,348,349,350,351,352,353,354,355,356, 357,358,359,361,363,364,365,366,367,368, 369,370,371,372,373,374,375,376,377,378, 379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:40.960,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:40.965,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5403 us [ns_server:debug,2014-08-19T16:51:40.965,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.966,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:40.966,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{358, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, 
{ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:40.972,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,348,349,350,351,352,353,354,355,356,357,358,359,361,363,364, 365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380, 381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:40.972,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29217.0> [rebalance:debug,2014-08-19T16:51:41.105,ns_1@10.242.238.90:<0.29218.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 601 [rebalance:debug,2014-08-19T16:51:41.107,ns_1@10.242.238.90:<0.29218.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:41.107,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29218.0> (ok) [rebalance:debug,2014-08-19T16:51:41.139,ns_1@10.242.238.90:<0.28138.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.139,ns_1@10.242.238.90:<0.28138.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.139,ns_1@10.242.238.90:<0.29221.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.139,ns_1@10.242.238.90:<0.29221.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.139,ns_1@10.242.238.90:<0.28138.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
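
Alongside the replication changes, janitor_agent workers log "Going to wait for persistence of checkpoint C in vbucket V" followed almost immediately by "Done" (vbuckets 599 and 601 in this section). A final sketch (illustrative names) that collects those waits keyed by the worker pid, so they can be matched against the corresponding "Done" / "Got done message from subprocess" lines:

    import re

    WAIT_RE = re.compile(
        r"(<\d+\.\d+\.\d+>):janitor_agent:handle_call:\d+\]"
        r"Going to wait for persistence of checkpoint (\d+) in vbucket (\d+)"
    )

    def persistence_waits(log_text):
        """Map worker pid -> (checkpoint, vbucket) for each wait logged."""
        return {m.group(1): (int(m.group(2)), int(m.group(3)))
                for m in WAIT_RE.finditer(log_text)}
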
[ns_server:info,2014-08-19T16:51:41.143,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 360 state to replica [ns_server:info,2014-08-19T16:51:41.143,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,363,364,365,366, 367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423, 424,425,426] ([360], []) [ns_server:debug,2014-08-19T16:51:41.144,ns_1@10.242.238.90:<0.29222.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,348,349,350,351,352,353,354,355,356,357, 358,359,360,361,363,364,365,366,367,368,369, 370,371,372,373,374,375,376,377,378,379,380, 381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424, 425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.71431>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,348,349,350,351,352,353,354,355,356,357,358,359,360,361, 363,364,365,366,367,368,369,370,371,372,373,374,375,376,377, 378,379,380,381,382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:41.144,ns_1@10.242.238.90:<0.29222.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29215.0> [ns_server:info,2014-08-19T16:51:41.145,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:41.162,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {354,1}, {355,1}, {356,1}, {357,1}, {358,1}, {359,1}, {360,1}, {361,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:41.163,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:41.164,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:51:41.164,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:41.164,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:41.164,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:41.164,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.164,ns_1@10.242.238.90:<0.29224.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.164,ns_1@10.242.238.90:<0.29224.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.165,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:41.165,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:41.165,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:41.165,ns_1@10.242.238.90:<0.29215.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:41.165,ns_1@10.242.238.90:<0.29222.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29215.0> [ns_server:debug,2014-08-19T16:51:41.165,ns_1@10.242.238.90:<0.29222.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:41.165,ns_1@10.242.238.90:<0.29226.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:41.166,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29215.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29217.0>,<<"cut off">>,<<"cut off">>,[],238,false,false,0, {1408,452701,164129}, completed, {<0.29222.0>,#Ref<0.0.1.71444>}, <<"replication_ns_1@10.242.238.90">>,<0.29215.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:41.166,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29222.0>,{#Ref<0.0.1.71433>,<0.29226.0>}} [error_logger:info,2014-08-19T16:51:41.166,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29226.0>}, {name, {new_child_id, [347,348,349,350,351,352,353,354,355,356,357, 358,359,360,361,363,364,365,366,367,368,369, 370,371,372,373,374,375,376,377,378,379,380, 381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423,424, 425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,348,349,350,351,352,353,354,355,356, 357,358,359,360,361,363,364,365,366,367, 368,369,370,371,372,373,374,375,376,377, 378,379,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:41.171,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.173,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.174,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3618 us [ns_server:debug,2014-08-19T16:51:41.175,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.175,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{360, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:41.181,ns_1@10.242.238.90:<0.29230.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 605) [ns_server:debug,2014-08-19T16:51:41.181,ns_1@10.242.238.90:<0.29230.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.181,ns_1@10.242.238.90:<0.29231.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 603) [ns_server:debug,2014-08-19T16:51:41.181,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,363, 364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379, 
380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [ns_server:debug,2014-08-19T16:51:41.181,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29228.0> (ok) [ns_server:debug,2014-08-19T16:51:41.181,ns_1@10.242.238.90:<0.29231.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:51:41.182,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29232.0> [ns_server:debug,2014-08-19T16:51:41.182,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29229.0> (ok) [rebalance:debug,2014-08-19T16:51:41.182,ns_1@10.242.238.90:<0.28373.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:41.182,ns_1@10.242.238.90:<0.28419.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.182,ns_1@10.242.238.90:<0.28373.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.182,ns_1@10.242.238.90:<0.29233.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.182,ns_1@10.242.238.90:<0.28419.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.182,ns_1@10.242.238.90:<0.29234.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.183,ns_1@10.242.238.90:<0.29233.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:41.183,ns_1@10.242.238.90:<0.29234.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.183,ns_1@10.242.238.90:<0.28373.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:41.183,ns_1@10.242.238.90:<0.28419.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:51:41.184,ns_1@10.242.238.90:<0.29235.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 598 [rebalance:debug,2014-08-19T16:51:41.184,ns_1@10.242.238.90:<0.29235.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:41.185,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29235.0> (ok) [rebalance:debug,2014-08-19T16:51:41.216,ns_1@10.242.238.90:<0.28088.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.217,ns_1@10.242.238.90:<0.28088.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.217,ns_1@10.242.238.90:<0.29238.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.217,ns_1@10.242.238.90:<0.29238.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.217,ns_1@10.242.238.90:<0.28088.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:41.224,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 362 state to replica [ns_server:info,2014-08-19T16:51:41.224,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365, 366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426] ([362], []) [ns_server:debug,2014-08-19T16:51:41.228,ns_1@10.242.238.90:<0.29239.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [347,348,349,350,351,352,353,354,355,356,357, 358,359,360,361,362,363,364,365,366,367,368, 369,370,371,372,373,374,375,376,377,378,379, 380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423, 424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.71651>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[347,348,349,350,351,352,353,354,355,356,357,358,359,360,361, 362,363,364,365,366,367,368,369,370,371,372,373,374,375,376, 377,378,379,380,381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:41.228,ns_1@10.242.238.90:<0.29239.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29226.0> [ns_server:info,2014-08-19T16:51:41.228,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:41.247,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream 
`replication_ns_1@10.242.238.90`: [{347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {354,1}, {355,1}, {356,1}, {357,1}, {358,1}, {359,1}, {360,1}, {361,1}, {362,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:41.248,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:41.249,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:41.249,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:41.249,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:41.249,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:41.249,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.249,ns_1@10.242.238.90:<0.29241.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.250,ns_1@10.242.238.90:<0.29241.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.250,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:41.250,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:41.250,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:41.250,ns_1@10.242.238.90:<0.29226.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:41.250,ns_1@10.242.238.90:<0.29239.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29226.0> [ns_server:debug,2014-08-19T16:51:41.251,ns_1@10.242.238.90:<0.29239.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:41.251,ns_1@10.242.238.90:<0.29243.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:41.251,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29226.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29232.0>,<<"cut off">>,<<"cut off">>,[],241,false,false,0, {1408,452701,249231}, completed, {<0.29239.0>,#Ref<0.0.1.71665>}, <<"replication_ns_1@10.242.238.90">>,<0.29226.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:41.251,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29239.0>,{#Ref<0.0.1.71653>,<0.29243.0>}} [ns_server:info,2014-08-19T16:51:41.252,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 603 state to active [error_logger:info,2014-08-19T16:51:41.251,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29243.0>}, {name, {new_child_id, [347,348,349,350,351,352,353,354,355,356,357, 358,359,360,361,362,363,364,365,366,367,368, 369,370,371,372,373,374,375,376,377,378,379, 380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422,423, 424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [347,348,349,350,351,352,353,354,355,356, 357,358,359,360,361,362,363,364,365,366, 367,368,369,370,371,372,373,374,375,376, 377,378,379,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:51:41.253,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 605 state to active [ns_server:debug,2014-08-19T16:51:41.256,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.265,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.265,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8817 us [ns_server:debug,2014-08-19T16:51:41.265,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.266,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{362, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, 
{map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:41.274,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362, 363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378, 379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:41.274,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29245.0> [views:debug,2014-08-19T16:51:41.276,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/603. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:41.276,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",603,active,1} [ns_server:debug,2014-08-19T16:51:41.299,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.300,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.301,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1733 us [ns_server:debug,2014-08-19T16:51:41.301,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.301,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{603, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:41.331,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.335,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.335,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3874 us [ns_server:debug,2014-08-19T16:51:41.336,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{605, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
[ns_server:debug,2014-08-19T16:51:41.337,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.338,ns_1@10.242.238.90:<0.29249.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 607) [ns_server:debug,2014-08-19T16:51:41.338,ns_1@10.242.238.90:<0.29249.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.338,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29248.0> (ok) [rebalance:debug,2014-08-19T16:51:41.339,ns_1@10.242.238.90:<0.28323.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.339,ns_1@10.242.238.90:<0.28323.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.339,ns_1@10.242.238.90:<0.29250.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.339,ns_1@10.242.238.90:<0.29250.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.339,ns_1@10.242.238.90:<0.28323.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:41.340,ns_1@10.242.238.90:<0.29251.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 600 [views:debug,2014-08-19T16:51:41.343,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/605. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:41.343,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",605,active,1} [rebalance:debug,2014-08-19T16:51:41.344,ns_1@10.242.238.90:<0.29251.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:41.344,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29251.0> (ok) [ns_server:info,2014-08-19T16:51:41.371,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 607 state to active [ns_server:debug,2014-08-19T16:51:41.399,ns_1@10.242.238.90:<0.29261.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 602) [ns_server:debug,2014-08-19T16:51:41.399,ns_1@10.242.238.90:<0.29261.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.399,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29260.0> (ok) [ns_server:debug,2014-08-19T16:51:41.399,ns_1@10.242.238.90:<0.29263.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 604) [ns_server:debug,2014-08-19T16:51:41.400,ns_1@10.242.238.90:<0.29263.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.400,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29262.0> (ok) [ns_server:debug,2014-08-19T16:51:41.400,ns_1@10.242.238.90:<0.29270.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 609) 
[rebalance:debug,2014-08-19T16:51:41.400,ns_1@10.242.238.90:<0.28444.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.400,ns_1@10.242.238.90:<0.29270.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.400,ns_1@10.242.238.90:<0.29274.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 611) [ns_server:debug,2014-08-19T16:51:41.400,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29264.0> (ok) [ns_server:debug,2014-08-19T16:51:41.400,ns_1@10.242.238.90:<0.28444.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.400,ns_1@10.242.238.90:<0.29274.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.400,ns_1@10.242.238.90:<0.29275.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.29276.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 608) [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29265.0> (ok) [rebalance:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.28392.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.29275.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.29277.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 613) [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.29276.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.28392.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.29278.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.29277.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.29279.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 615) [rebalance:info,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.28444.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.28286.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.29278.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.29279.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.29280.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 606) [rebalance:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.28236.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.29280.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.28286.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.29282.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 610) [rebalance:info,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.28392.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:41.401,ns_1@10.242.238.90:<0.29281.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:<0.28236.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:<0.29283.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:<0.29282.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:<0.29281.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:<0.29284.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 617) [ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:<0.29283.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:<0.29284.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29266.0> (ok) [rebalance:info,2014-08-19T16:51:41.402,ns_1@10.242.238.90:<0.28286.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:41.402,ns_1@10.242.238.90:<0.28236.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:<0.29285.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 612) [ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29267.0> (ok) [ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:<0.29285.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29268.0> (ok) [ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29269.0> (ok) [ns_server:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29271.0> (ok) [rebalance:debug,2014-08-19T16:51:41.402,ns_1@10.242.238.90:<0.28312.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29272.0> (ok) [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29273.0> (ok) [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.28312.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.28193.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.29286.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.28143.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.28193.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.29286.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.28348.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.28261.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.28107.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.29287.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.28204.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.28261.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack 
[ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.28348.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.28143.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.29288.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.29289.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.28312.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.29290.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.403,ns_1@10.242.238.90:<0.29287.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.29289.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.28204.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.28107.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.29291.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.29292.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.29288.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.29290.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.28193.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.29291.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.29292.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.28143.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.28348.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.28204.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.28261.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:41.404,ns_1@10.242.238.90:<0.28107.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:41.406,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.407,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3489 us [ns_server:debug,2014-08-19T16:51:41.407,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.408,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{607, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:41.435,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/607. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:41.436,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",607,active,1} [ns_server:info,2014-08-19T16:51:41.446,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 602 state to active [ns_server:info,2014-08-19T16:51:41.455,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 604 state to active [ns_server:info,2014-08-19T16:51:41.473,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 610 state to active [ns_server:debug,2014-08-19T16:51:41.478,ns_1@10.242.238.90:<0.29295.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 614) [ns_server:debug,2014-08-19T16:51:41.479,ns_1@10.242.238.90:<0.29295.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.479,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29294.0> (ok) [rebalance:debug,2014-08-19T16:51:41.480,ns_1@10.242.238.90:<0.28168.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.480,ns_1@10.242.238.90:<0.28168.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.480,ns_1@10.242.238.90:<0.29296.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.480,ns_1@10.242.238.90:<0.29296.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.480,ns_1@10.242.238.90:<0.28168.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:51:41.481,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 606 state to active [ns_server:debug,2014-08-19T16:51:41.486,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.488,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.488,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2012 us [ns_server:debug,2014-08-19T16:51:41.488,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.489,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{602, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:41.491,ns_1@10.242.238.90:<0.29300.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 618) [ns_server:debug,2014-08-19T16:51:41.491,ns_1@10.242.238.90:<0.29300.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.491,ns_1@10.242.238.90:<0.29301.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 616) [ns_server:debug,2014-08-19T16:51:41.491,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29298.0> (ok) [ns_server:debug,2014-08-19T16:51:41.491,ns_1@10.242.238.90:<0.29301.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.491,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29299.0> (ok) [rebalance:debug,2014-08-19T16:51:41.491,ns_1@10.242.238.90:<0.28082.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:41.491,ns_1@10.242.238.90:<0.28118.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.492,ns_1@10.242.238.90:<0.28082.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.492,ns_1@10.242.238.90:<0.29302.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.492,ns_1@10.242.238.90:<0.29302.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:41.492,ns_1@10.242.238.90:<0.28118.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.492,ns_1@10.242.238.90:<0.29303.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception 
[ns_server:debug,2014-08-19T16:51:41.492,ns_1@10.242.238.90:<0.29303.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.492,ns_1@10.242.238.90:<0.28118.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:41.492,ns_1@10.242.238.90:<0.28082.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:51:41.496,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 617 state to active [views:debug,2014-08-19T16:51:41.512,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/602. Updated state: active (1) [ns_server:info,2014-08-19T16:51:41.512,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 613 state to active [ns_server:debug,2014-08-19T16:51:41.512,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",602,active,1} [ns_server:info,2014-08-19T16:51:41.521,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 608 state to active [ns_server:debug,2014-08-19T16:51:41.523,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.527,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.527,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3810 us [ns_server:debug,2014-08-19T16:51:41.527,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.528,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{604, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:41.552,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 612 state to active [ns_server:info,2014-08-19T16:51:41.561,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 609 state to active [ns_server:info,2014-08-19T16:51:41.562,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 611 state to active [ns_server:debug,2014-08-19T16:51:41.564,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:51:41.571,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 615 state to active [ns_server:debug,2014-08-19T16:51:41.572,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.573,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized 
config in 8361 us [ns_server:debug,2014-08-19T16:51:41.573,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.573,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{610, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:41.578,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/610. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:41.579,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",610,active,1} [ns_server:debug,2014-08-19T16:51:41.600,ns_1@10.242.238.90:<0.29308.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 601) [ns_server:info,2014-08-19T16:51:41.600,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 614 state to active [ns_server:debug,2014-08-19T16:51:41.600,ns_1@10.242.238.90:<0.29308.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.601,ns_1@10.242.238.90:<0.29309.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 599) [ns_server:debug,2014-08-19T16:51:41.601,ns_1@10.242.238.90:<0.29309.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.602,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29306.0> (ok) [ns_server:debug,2014-08-19T16:51:41.602,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29307.0> (ok) [ns_server:debug,2014-08-19T16:51:41.602,ns_1@10.242.238.90:<0.29311.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 598) [ns_server:debug,2014-08-19T16:51:41.602,ns_1@10.242.238.90:<0.29311.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.602,ns_1@10.242.238.90:<0.29313.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 600) [ns_server:debug,2014-08-19T16:51:41.602,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29310.0> (ok) [ns_server:debug,2014-08-19T16:51:41.602,ns_1@10.242.238.90:<0.29313.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:41.602,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29312.0> (ok) [rebalance:debug,2014-08-19T16:51:41.602,ns_1@10.242.238.90:<0.28469.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:41.602,ns_1@10.242.238.90:<0.28519.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
[rebalance:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.28544.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.29314.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.28469.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.29314.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.28544.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.28469.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.29315.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.28519.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.29316.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.29315.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.28494.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.29316.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.28544.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.28519.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.28494.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.603,ns_1@10.242.238.90:<0.29317.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.604,ns_1@10.242.238.90:<0.29317.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.604,ns_1@10.242.238.90:<0.28494.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:51:41.605,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 616 state to active [ns_server:debug,2014-08-19T16:51:41.607,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.607,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3651 us [ns_server:debug,2014-08-19T16:51:41.607,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.607,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{606, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:41.617,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 618 state to active [views:debug,2014-08-19T16:51:41.635,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/606. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:41.635,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",606,active,1} [ns_server:debug,2014-08-19T16:51:41.648,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.651,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.651,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3349 us [ns_server:debug,2014-08-19T16:51:41.652,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.652,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{617, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:41.668,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/604. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:41.668,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",604,active,1} [ns_server:debug,2014-08-19T16:51:41.691,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.694,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.695,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3309 us [ns_server:debug,2014-08-19T16:51:41.696,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.697,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{613, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:41.704,ns_1@10.242.238.90:<0.28500.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.704,ns_1@10.242.238.90:<0.28500.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.704,ns_1@10.242.238.90:<0.29321.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.704,ns_1@10.242.238.90:<0.29321.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.705,ns_1@10.242.238.90:<0.28500.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:51:41.726,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/618. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:41.726,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",618,active,1} [ns_server:info,2014-08-19T16:51:41.729,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 599 state to active [ns_server:debug,2014-08-19T16:51:41.734,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.738,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1873 us [ns_server:debug,2014-08-19T16:51:41.739,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.740,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.740,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{608, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:41.751,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 598 state to active [ns_server:info,2014-08-19T16:51:41.761,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 600 state to active [ns_server:info,2014-08-19T16:51:41.773,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 601 state to active [ns_server:debug,2014-08-19T16:51:41.777,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.777,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.778,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 859 us [ns_server:debug,2014-08-19T16:51:41.778,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{612, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:41.779,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:41.785,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/616. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:41.785,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",616,active,1} [rebalance:debug,2014-08-19T16:51:41.812,ns_1@10.242.238.90:<0.28450.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.812,ns_1@10.242.238.90:<0.28450.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.812,ns_1@10.242.238.90:<0.29324.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.812,ns_1@10.242.238.90:<0.29324.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.812,ns_1@10.242.238.90:<0.28450.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:41.814,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.821,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.822,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7735 us [ns_server:debug,2014-08-19T16:51:41.822,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.823,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{609, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:41.843,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/614. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:41.844,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",614,active,1} [ns_server:debug,2014-08-19T16:51:41.853,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.856,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.856,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3272 us [ns_server:debug,2014-08-19T16:51:41.857,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.858,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{611, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:41.886,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/612. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:41.886,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",612,active,1} [rebalance:debug,2014-08-19T16:51:41.891,ns_1@10.242.238.90:<0.28525.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.892,ns_1@10.242.238.90:<0.28525.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.892,ns_1@10.242.238.90:<0.29327.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.892,ns_1@10.242.238.90:<0.29327.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.892,ns_1@10.242.238.90:<0.28525.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:51:41.893,ns_1@10.242.238.90:<0.28550.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.893,ns_1@10.242.238.90:<0.28550.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.893,ns_1@10.242.238.90:<0.29328.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.894,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.894,ns_1@10.242.238.90:<0.29328.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.894,ns_1@10.242.238.90:<0.28550.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:41.897,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3236 us [ns_server:debug,2014-08-19T16:51:41.897,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.898,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.898,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{615, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:41.927,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/608. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:41.927,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",608,active,1} [ns_server:debug,2014-08-19T16:51:41.933,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.936,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.936,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3361 us [rebalance:debug,2014-08-19T16:51:41.937,ns_1@10.242.238.90:<0.28475.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:41.937,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.937,ns_1@10.242.238.90:<0.28475.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:41.937,ns_1@10.242.238.90:<0.29331.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:41.937,ns_1@10.242.238.90:<0.29331.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:41.937,ns_1@10.242.238.90:<0.28475.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:41.938,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{614, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:41.961,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/600. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:41.961,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",600,active,1} [ns_server:debug,2014-08-19T16:51:41.966,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:41.969,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3116 us [ns_server:debug,2014-08-19T16:51:41.969,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.970,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:41.971,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{859, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.014,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.015,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.015,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1417 us [ns_server:debug,2014-08-19T16:51:42.015,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.016,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{616, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:42.019,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/598. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:42.020,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",598,active,1} [ns_server:debug,2014-08-19T16:51:42.048,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.056,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.056,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7436 us [ns_server:debug,2014-08-19T16:51:42.056,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.057,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{618, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:42.079,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/617. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:42.079,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",617,active,1} [ns_server:debug,2014-08-19T16:51:42.085,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.088,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.088,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3046 us [ns_server:debug,2014-08-19T16:51:42.088,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.089,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{861, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.131,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.133,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:42.133,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1556 us [ns_server:debug,2014-08-19T16:51:42.134,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.134,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{863, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:42.145,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/615. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:42.145,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",615,active,1} [ns_server:debug,2014-08-19T16:51:42.163,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.167,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.167,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2912 us [ns_server:debug,2014-08-19T16:51:42.167,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.169,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{857, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.198,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.202,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.203,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4199 us [ns_server:debug,2014-08-19T16:51:42.203,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{873, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, 
{auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:42.204,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/613. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:42.204,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",613,active,1} [ns_server:debug,2014-08-19T16:51:42.204,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:42.238,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/611. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:42.239,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",611,active,1} [ns_server:debug,2014-08-19T16:51:42.244,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.247,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.248,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3158 us [ns_server:debug,2014-08-19T16:51:42.248,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{865, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.248,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:42.272,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/609. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:42.272,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",609,active,1} [ns_server:debug,2014-08-19T16:51:42.289,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.296,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.296,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6754 us [ns_server:debug,2014-08-19T16:51:42.297,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.297,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{862, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:42.299,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 344 state to replica [ns_server:info,2014-08-19T16:51:42.299,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [344,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364, 365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383, 384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426] ([344], []) [ns_server:debug,2014-08-19T16:51:42.300,ns_1@10.242.238.90:<0.29341.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [344,347,348,349,350,351,352,353,354,355,356, 357,358,359,360,361,362,363,364,365,366,367, 368,369,370,371,372,373,374,375,376,377,378, 379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.73716>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[344,347,348,349,350,351,352,353,354,355,356,357,358,359,360, 361,362,363,364,365,366,367,368,369,370,371,372,373,374,375, 376,377,378,379,380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:42.300,ns_1@10.242.238.90:<0.29341.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29243.0> 
[ns_server:info,2014-08-19T16:51:42.301,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:42.319,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{344,1}, {347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {354,1}, {355,1}, {356,1}, {357,1}, {358,1}, {359,1}, {360,1}, {361,1}, {362,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:42.321,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:42.321,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:42.321,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:42.321,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:42.321,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:42.321,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:42.321,ns_1@10.242.238.90:<0.29343.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:42.322,ns_1@10.242.238.90:<0.29343.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:42.322,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:42.322,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:42.322,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:42.322,ns_1@10.242.238.90:<0.29243.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:51:42.322,ns_1@10.242.238.90:<0.29341.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29243.0> [ns_server:debug,2014-08-19T16:51:42.322,ns_1@10.242.238.90:<0.29341.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:42.323,ns_1@10.242.238.90:<0.29345.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:42.323,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29243.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29245.0>,<<"cut off">>,<<"cut off">>,[],244,false,false,0, {1408,452702,321345}, completed, {<0.29341.0>,#Ref<0.0.1.73729>}, <<"replication_ns_1@10.242.238.90">>,<0.29243.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:42.323,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29341.0>,{#Ref<0.0.1.73718>,<0.29345.0>}} [error_logger:info,2014-08-19T16:51:42.323,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29345.0>}, {name, {new_child_id, [344,347,348,349,350,351,352,353,354,355,356, 357,358,359,360,361,362,363,364,365,366,367, 368,369,370,371,372,373,374,375,376,377,378, 379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [344,347,348,349,350,351,352,353,354,355, 356,357,358,359,360,361,362,363,364,365, 366,367,368,369,370,371,372,373,374,375, 376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395, 396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415, 416,417,418,419,420,421,422,423,424,425, 426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:42.329,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.332,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.333,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3857 us [ns_server:debug,2014-08-19T16:51:42.333,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.334,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{344, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, 
{replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.339,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[344,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361, 362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377, 378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425, 426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:42.339,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29347.0> [views:debug,2014-08-19T16:51:42.345,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/601. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:42.345,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",601,active,1} [ns_server:debug,2014-08-19T16:51:42.370,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.373,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.373,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3253 us [ns_server:debug,2014-08-19T16:51:42.374,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.374,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{853, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:42.404,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/599. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:42.405,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",599,active,1} [ns_server:debug,2014-08-19T16:51:42.405,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.408,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.408,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2899 us [ns_server:debug,2014-08-19T16:51:42.408,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.409,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{870, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.447,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.450,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.451,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3136 us [ns_server:debug,2014-08-19T16:51:42.451,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.452,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{599, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.486,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.489,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.490,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1523 us [ns_server:debug,2014-08-19T16:51:42.490,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:42.490,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{855, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.521,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.528,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.529,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7715 us [ns_server:debug,2014-08-19T16:51:42.529,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.530,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{598, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.556,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.559,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.559,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3037 us [ns_server:debug,2014-08-19T16:51:42.559,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.560,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{600, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.598,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.602,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully 
synchronized config in 3312 us [ns_server:debug,2014-08-19T16:51:42.602,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.602,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.603,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{601, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.632,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.633,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.633,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1071 us [ns_server:debug,2014-08-19T16:51:42.634,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.635,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{856, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:42.636,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 346 state to replica [ns_server:info,2014-08-19T16:51:42.636,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [344,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363, 364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382, 383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426] ([346], []) [ns_server:debug,2014-08-19T16:51:42.641,ns_1@10.242.238.90:<0.29357.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [344,346,347,348,349,350,351,352,353,354,355, 356,357,358,359,360,361,362,363,364,365,366, 367,368,369,370,371,372,373,374,375,376,377, 378,379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426], 'ns_1@10.242.238.89'}, 
#Ref<0.0.1.74155>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[344,346,347,348,349,350,351,352,353,354,355,356,357,358,359, 360,361,362,363,364,365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:42.642,ns_1@10.242.238.90:<0.29357.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29345.0> [ns_server:info,2014-08-19T16:51:42.642,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:42.667,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{344,1}, {346,1}, {347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {354,1}, {355,1}, {356,1}, {357,1}, {358,1}, {359,1}, {360,1}, {361,1}, {362,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:42.668,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:42.669,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:42.669,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:42.669,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:42.669,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:42.669,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:42.669,ns_1@10.242.238.90:<0.29359.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:42.669,ns_1@10.242.238.90:<0.29359.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:42.669,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:42.669,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:42.670,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:42.670,ns_1@10.242.238.90:<0.29345.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:42.670,ns_1@10.242.238.90:<0.29357.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29345.0> [ns_server:debug,2014-08-19T16:51:42.670,ns_1@10.242.238.90:<0.29357.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:42.670,ns_1@10.242.238.90:<0.29361.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:42.670,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29345.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29347.0>,<<"cut off">>,<<"cut off">>,[],247,false,false,0, {1408,452702,668989}, completed, {<0.29357.0>,#Ref<0.0.1.74168>}, <<"replication_ns_1@10.242.238.90">>,<0.29345.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:42.671,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29357.0>,{#Ref<0.0.1.74157>,<0.29361.0>}} [error_logger:info,2014-08-19T16:51:42.671,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29361.0>}, {name, {new_child_id, [344,346,347,348,349,350,351,352,353,354,355, 356,357,358,359,360,361,362,363,364,365,366, 367,368,369,370,371,372,373,374,375,376,377, 378,379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410, 411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [344,346,347,348,349,350,351,352,353,354, 355,356,357,358,359,360,361,362,363,364, 365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384, 385,386,387,388,389,390,391,392,393,394, 395,396,397,398,399,400,401,402,403,404, 405,406,407,408,409,410,411,412,413,414, 415,416,417,418,419,420,421,422,423,424, 425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:42.676,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.680,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3382 us [ns_server:debug,2014-08-19T16:51:42.680,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:42.681,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.681,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{346, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.687,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[344,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360, 361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376, 377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424, 425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:42.688,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29363.0> [ns_server:debug,2014-08-19T16:51:42.721,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.724,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.724,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3683 us [ns_server:debug,2014-08-19T16:51:42.725,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.726,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{854, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.767,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.775,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.775,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8220 us [ns_server:debug,2014-08-19T16:51:42.776,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:42.776,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{871, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.805,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.808,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.808,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3452 us [ns_server:debug,2014-08-19T16:51:42.809,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.809,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{867, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.845,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.849,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.849,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3497 us [ns_server:debug,2014-08-19T16:51:42.849,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.850,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{872, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.879,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
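The repeated "config change: buckets" entries each carry a one-vbucket map delta of the form {map,[{VBucket, OldChain, NewChain}]}. A hedged sketch for extracting those deltas (external tooling, not ns_server code) follows.

```python
# Illustrative helper (not part of ns_server): extract the per-vbucket
# chain updates carried by the "config change: buckets" entries, which
# have the shape {map,[{VBucket, OldChain, NewChain}]}.
import re

MAP_DELTA = re.compile(r"\{map,\[\{(\d+),\s*\[([^\]]*)\],\s*\[([^\]]*)\]\}")

def _nodes(chain):
    return [n.strip().strip("'") for n in chain.split(",")]

def chain_updates(log_text):
    """Return (vbucket, old_chain, new_chain) tuples in log order."""
    return [(int(vb), _nodes(old), _nodes(new))
            for vb, old, new in MAP_DELTA.findall(log_text)]

# e.g. the first entry above yields
# (346, ['ns_1@10.242.238.88', 'undefined'],
#       ['ns_1@10.242.238.89', 'ns_1@10.242.238.90'])
```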
[ns_server:debug,2014-08-19T16:51:42.883,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.883,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3194 us [ns_server:debug,2014-08-19T16:51:42.883,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.883,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{869, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.919,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.924,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.924,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4764 us [ns_server:debug,2014-08-19T16:51:42.925,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.925,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{864, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:42.931,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 343 state to replica [ns_server:info,2014-08-19T16:51:42.931,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [343,344,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362, 363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400, 401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426] ([343], []) [ns_server:debug,2014-08-19T16:51:42.932,ns_1@10.242.238.90:<0.29371.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [343,344,346,347,348,349,350,351,352,353,354, 355,356,357,358,359,360,361,362,363,364,365, 366,367,368,369,370,371,372,373,374,375,376, 377,378,379,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398, 
399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.74463>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[343,344,346,347,348,349,350,351,352,353,354,355,356,357,358, 359,360,361,362,363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384,385,386,387,388, 389,390,391,392,393,394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:42.933,ns_1@10.242.238.90:<0.29371.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29361.0> [ns_server:info,2014-08-19T16:51:42.933,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:42.952,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{343,1}, {344,1}, {346,1}, {347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {354,1}, {355,1}, {356,1}, {357,1}, {358,1}, {359,1}, {360,1}, {361,1}, {362,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:42.953,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:42.953,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:42.954,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:51:42.954,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:42.954,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:42.954,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:42.954,ns_1@10.242.238.90:<0.29373.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:42.954,ns_1@10.242.238.90:<0.29373.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:42.954,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:42.954,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:42.955,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:42.955,ns_1@10.242.238.90:<0.29361.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:42.955,ns_1@10.242.238.90:<0.29371.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29361.0> [ns_server:debug,2014-08-19T16:51:42.955,ns_1@10.242.238.90:<0.29371.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:42.955,ns_1@10.242.238.90:<0.29375.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:42.955,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29361.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29363.0>,<<"cut off">>,<<"cut off">>,[],250,false,false,0, {1408,452702,953853}, completed, {<0.29371.0>,#Ref<0.0.1.74476>}, <<"replication_ns_1@10.242.238.90">>,<0.29361.0>, {had_backfill,false,undefined,[]}, completed,false}. 
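The "Got old ebucketmigrator state from <pid>" entries make it possible to reconstruct the succession of replicator processes for the stream: the pid in the entry's log header is the replacement, and the quoted pid is the process whose state it inherited. The sketch below (illustrative only, not an ns_server API) extracts that chain.

```python
# Illustrative helper (not part of ns_server): reconstruct the succession
# of ebucketmigrator processes from the "Got old ebucketmigrator state
# from <pid>" entries. The pid in the log header is the replacement; the
# quoted pid is the process whose state it inherited.
import re

HANDOFF = re.compile(
    r":(<0\.\d+\.\d+>):ebucketmigrator_srv:init:\d+\]"
    r"Got old ebucketmigrator state from (<0\.\d+\.\d+>)")

def handoff_chain(log_text):
    """Return [(new_pid, old_pid), ...] in handoff order."""
    return HANDOFF.findall(log_text)

# From the entries above: <0.29361.0> inherited from <0.29345.0> and
# <0.29375.0> from <0.29361.0>; the later handoffs continue the chain.
```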
[ns_server:debug,2014-08-19T16:51:42.956,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29371.0>,{#Ref<0.0.1.74465>,<0.29375.0>}} [error_logger:info,2014-08-19T16:51:42.956,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29375.0>}, {name, {new_child_id, [343,344,346,347,348,349,350,351,352,353,354, 355,356,357,358,359,360,361,362,363,364,365, 366,367,368,369,370,371,372,373,374,375,376, 377,378,379,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409, 410,411,412,413,414,415,416,417,418,419,420, 421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [343,344,346,347,348,349,350,351,352,353, 354,355,356,357,358,359,360,361,362,363, 364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383, 384,385,386,387,388,389,390,391,392,393, 394,395,396,397,398,399,400,401,402,403, 404,405,406,407,408,409,410,411,412,413, 414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:42.962,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:42.965,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.965,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2974 us [ns_server:debug,2014-08-19T16:51:42.966,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:42.966,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{343, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:42.971,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[343,344,346,347,348,349,350,351,352,353,354,355,356,357,358,359, 360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375, 376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423, 424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:42.971,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29377.0> 
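ns_config_rep logs a "Fully synchronized config in N us" line for every full synchronization request from 'ns_1@10.242.238.88'. A quick way to summarise those timings (an external helper, not part of the server) is sketched below.

```python
# Illustrative helper (not part of ns_server): summarise the
# "Fully synchronized config in N us" timings logged by ns_config_rep
# for each full synchronization request.
import re
from statistics import mean

SYNC_US = re.compile(r"Fully synchronized config in (\d+) us")

def sync_timings_us(log_text):
    return [int(us) for us in SYNC_US.findall(log_text)]

def sync_summary(log_text):
    times = sync_timings_us(log_text)
    if not times:
        return {"count": 0}
    return {"count": len(times), "mean_us": mean(times), "max_us": max(times)}

# The samples in this section range from roughly 3,000 us to just under
# 10,000 us.
```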
[ns_server:debug,2014-08-19T16:51:43.007,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:43.017,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:43.017,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 9651 us [ns_server:debug,2014-08-19T16:51:43.017,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:43.018,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{858, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:43.020,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 342 state to replica [ns_server:info,2014-08-19T16:51:43.021,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [342,343,344,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361, 362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380, 381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399, 400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426] ([342], []) [ns_server:debug,2014-08-19T16:51:43.022,ns_1@10.242.238.90:<0.29379.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [342,343,344,346,347,348,349,350,351,352,353, 354,355,356,357,358,359,360,361,362,363,364, 365,366,367,368,369,370,371,372,373,374,375, 376,377,378,379,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.75316>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[342,343,344,346,347,348,349,350,351,352,353,354,355,356,357, 358,359,360,361,362,363,364,365,366,367,368,369,370,371,372, 373,374,375,376,377,378,379,380,381,382,383,384,385,386,387, 388,389,390,391,392,393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:43.022,ns_1@10.242.238.90:<0.29379.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29375.0> [ns_server:info,2014-08-19T16:51:43.023,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` 
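The tap_replication_manager entries state both the full target vbucket list and, in parentheses, the vbuckets being added and removed, e.g. "([342], [])". The following sketch (illustrative, not ns_server code) parses those entries.

```python
# Illustrative helper (not part of ns_server): parse the
# tap_replication_manager "Going to change replication from 'Node' to
# have [...] ([Added], [Removed])" entries.
import re

CHANGE = re.compile(
    r"Going to change replication from '([^']+)' to have\s*"
    r"\[[\d,\s]*\]\s*\(\[([\d,\s]*)\],\s*\[([\d,\s]*)\]\)")

def _ints(s):
    return [int(v) for v in s.replace(",", " ").split()]

def replication_changes(log_text):
    """Return (source_node, added_vbuckets, removed_vbuckets) tuples."""
    return [(node, _ints(added), _ints(removed))
            for node, added, removed in CHANGE.findall(log_text)]

# The changes above each add one vbucket from 'ns_1@10.242.238.89'
# ([343], then [342]) and remove none.
```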
[ns_server:info,2014-08-19T16:51:43.041,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{342,1}, {343,1}, {344,1}, {346,1}, {347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {354,1}, {355,1}, {356,1}, {357,1}, {358,1}, {359,1}, {360,1}, {361,1}, {362,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, {374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:43.042,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:43.043,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:43.043,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:43.043,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:43.043,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:43.043,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:43.043,ns_1@10.242.238.90:<0.29381.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:43.043,ns_1@10.242.238.90:<0.29381.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:43.044,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:43.044,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:43.044,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:43.044,ns_1@10.242.238.90:<0.29375.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:43.044,ns_1@10.242.238.90:<0.29379.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29375.0> [ns_server:debug,2014-08-19T16:51:43.044,ns_1@10.242.238.90:<0.29379.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:43.044,ns_1@10.242.238.90:<0.29383.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:43.045,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29375.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29377.0>,<<"cut off">>,<<"cut off">>,[],253,false,false,0, {1408,452703,43173}, completed, {<0.29379.0>,#Ref<0.0.1.75329>}, <<"replication_ns_1@10.242.238.90">>,<0.29375.0>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:51:43.045,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29379.0>,{#Ref<0.0.1.75318>,<0.29383.0>}} [error_logger:info,2014-08-19T16:51:43.045,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29383.0>}, {name, {new_child_id, [342,343,344,346,347,348,349,350,351,352,353, 354,355,356,357,358,359,360,361,362,363,364, 365,366,367,368,369,370,371,372,373,374,375, 376,377,378,379,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397, 398,399,400,401,402,403,404,405,406,407,408, 409,410,411,412,413,414,415,416,417,418,419, 420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [342,343,344,346,347,348,349,350,351,352, 353,354,355,356,357,358,359,360,361,362, 363,364,365,366,367,368,369,370,371,372, 373,374,375,376,377,378,379,380,381,382, 383,384,385,386,387,388,389,390,391,392, 393,394,395,396,397,398,399,400,401,402, 403,404,405,406,407,408,409,410,411,412, 413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:43.050,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:43.054,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:43.054,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3469 us [ns_server:debug,2014-08-19T16:51:43.054,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:43.055,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{342, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:43.064,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[342,343,344,346,347,348,349,350,351,352,353,354,355,356,357,358, 
359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390, 391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406, 407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422, 423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:43.065,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29385.0> [ns_server:debug,2014-08-19T16:51:43.091,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:43.095,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:43.095,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4501 us [ns_server:debug,2014-08-19T16:51:43.096,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:43.096,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{866, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:43.126,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:43.129,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:43.129,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3217 us [ns_server:debug,2014-08-19T16:51:43.130,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:43.130,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{868, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:43.170,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:43.175,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4756 us [ns_server:debug,2014-08-19T16:51:43.175,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing 
replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:43.176,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:43.176,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{860, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:43.183,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 345 state to replica [ns_server:info,2014-08-19T16:51:43.183,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.89' to have [342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360, 361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379, 380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426] ([345], []) [ns_server:debug,2014-08-19T16:51:43.186,ns_1@10.242.238.90:<0.29389.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [342,343,344,345,346,347,348,349,350,351,352, 353,354,355,356,357,358,359,360,361,362,363, 364,365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}, #Ref<0.0.1.75550>} Args:[{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[342,343,344,345,346,347,348,349,350,351,352,353,354,355,356, 357,358,359,360,361,362,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381,382,383,384,385,386, 387,388,389,390,391,392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411,412,413,414,415,416, 417,418,419,420,421,422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:51:43.187,ns_1@10.242.238.90:<0.29389.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.29383.0> [ns_server:info,2014-08-19T16:51:43.187,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:51:43.207,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{342,1}, {343,1}, {344,1}, {345,1}, {346,1}, {347,1}, {348,1}, {349,1}, {350,1}, {351,1}, {352,1}, {353,1}, {354,1}, {355,1}, {356,1}, {357,1}, {358,1}, {359,1}, {360,1}, {361,1}, {362,1}, {363,1}, {364,1}, {365,1}, {366,1}, {367,1}, {368,1}, {369,1}, {370,1}, {371,1}, {372,1}, {373,1}, 
{374,1}, {375,1}, {376,1}, {377,1}, {378,1}, {379,1}, {380,1}, {381,1}, {382,1}, {383,1}, {384,1}, {385,1}, {386,1}, {387,1}, {388,1}, {389,1}, {390,1}, {391,1}, {392,1}, {393,1}, {394,1}, {395,1}, {396,1}, {397,1}, {398,1}, {399,1}, {400,1}, {401,1}, {402,1}, {403,1}, {404,1}, {405,1}, {406,1}, {407,1}, {408,1}, {409,1}, {410,1}, {411,1}, {412,1}, {413,1}, {414,1}, {415,1}, {416,1}, {417,1}, {418,1}, {419,1}, {420,1}, {421,1}, {422,1}, {423,1}, {424,1}, {425,1}, {426,1}] [ns_server:info,2014-08-19T16:51:43.208,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:51:43.209,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:51:43.209,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:51:43.209,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:51:43.209,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:51:43.209,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:43.209,ns_1@10.242.238.90:<0.29391.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:43.209,ns_1@10.242.238.90:<0.29391.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:43.210,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:43.210,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:51:43.210,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:51:43.210,ns_1@10.242.238.90:<0.29383.0>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:51:43.210,ns_1@10.242.238.90:<0.29389.0>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.29383.0> [ns_server:debug,2014-08-19T16:51:43.211,ns_1@10.242.238.90:<0.29389.0>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:51:43.211,ns_1@10.242.238.90:<0.29393.0>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:51:43.211,ns_1@10.242.238.90:<0.29393.0>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.29383.0>: {state,#Port<0.16372>,#Port<0.16368>,#Port<0.16373>,#Port<0.16369>, <0.29385.0>,<<"cut off">>,<<"cut off">>,[],256,false,false,0, {1408,452703,209304}, completed, {<0.29389.0>,#Ref<0.0.1.75563>}, <<"replication_ns_1@10.242.238.90">>,<0.29383.0>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:51:43.211,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.29389.0>,{#Ref<0.0.1.75552>,<0.29393.0>}} [error_logger:info,2014-08-19T16:51:43.211,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.29393.0>}, {name, {new_child_id, [342,343,344,345,346,347,348,349,350,351,352, 353,354,355,356,357,358,359,360,361,362,363, 364,365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [342,343,344,345,346,347,348,349,350,351, 352,353,354,355,356,357,358,359,360,361, 362,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:51:43.216,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:43.219,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:43.219,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3315 us [ns_server:debug,2014-08-19T16:51:43.219,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:43.220,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{345, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:43.226,ns_1@10.242.238.90:<0.29393.0>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357, 358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:51:43.226,ns_1@10.242.238.90:<0.29393.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29395.0> 
[ns_server:info,2014-08-19T16:51:43.271,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 597 state to replica [ns_server:info,2014-08-19T16:51:43.276,ns_1@10.242.238.90:<0.29396.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 597 to state replica [ns_server:debug,2014-08-19T16:51:43.306,ns_1@10.242.238.90:<0.29396.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_597_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:43.308,ns_1@10.242.238.90:<0.29396.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[597]}, {checkpoints,[{597,0}]}, {name,<<"replication_building_597_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[597]}, {takeover,false}, {suffix,"building_597_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",597,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:43.308,ns_1@10.242.238.90:<0.29396.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29397.0> [rebalance:debug,2014-08-19T16:51:43.308,ns_1@10.242.238.90:<0.29396.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:43.309,ns_1@10.242.238.90:<0.29396.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.25898.1>,#Ref<16550.0.2.46633>}]} [rebalance:info,2014-08-19T16:51:43.309,ns_1@10.242.238.90:<0.29396.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 597 [rebalance:debug,2014-08-19T16:51:43.309,ns_1@10.242.238.90:<0.29396.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.25898.1>,#Ref<16550.0.2.46633>}] [ns_server:debug,2014-08-19T16:51:43.310,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29398.0> (ok) [ns_server:debug,2014-08-19T16:51:43.310,ns_1@10.242.238.90:<0.29396.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:43.312,ns_1@10.242.238.90:<0.29399.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 597 [ns_server:info,2014-08-19T16:51:43.379,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 596 state to replica [ns_server:info,2014-08-19T16:51:43.384,ns_1@10.242.238.90:<0.29430.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 596 to state replica [ns_server:debug,2014-08-19T16:51:43.403,ns_1@10.242.238.90:<0.29430.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_596_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:43.404,ns_1@10.242.238.90:<0.29430.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[596]}, {checkpoints,[{596,0}]}, {name,<<"replication_building_596_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[596]}, {takeover,false}, {suffix,"building_596_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",596,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:43.406,ns_1@10.242.238.90:<0.29430.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29431.0> 
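From here the log switches to building new replicas vbucket by vbucket: the vbucket state is changed to replica, a replication_building_<vb> tap stream is started against 10.242.238.88, the initial stream and backfill-close are observed, and a janitor_agent worker waits for persistence of checkpoint 1. The sketch below (external tooling; the event labels are invented here, not log terms) groups those entries per vbucket so each one reads as a single timeline.

```python
# Illustrative helper (not part of ns_server): group the replica-building
# entries by vbucket so each vbucket's progression can be read as one
# timeline. The event labels used here are invented, not log terms.
import re
from collections import defaultdict

EVENTS = [
    ("state_replica", re.compile(r"Changed vbucket (\d+) state\s+to replica")),
    ("building_tap", re.compile(r"replication_building_(\d+)_")),
    ("initial_stream", re.compile(r"Initial stream for vbucket (\d+)")),
    ("wait_persistence",
     re.compile(r"persistence of checkpoint \d+ in vbucket (\d+)")),
]

def building_timeline(log_text):
    """Map vbucket id -> ordered list of event labels seen for it."""
    seen = defaultdict(list)
    for label, rx in EVENTS:
        for m in rx.finditer(log_text):
            seen[int(m.group(1))].append((m.start(), label))
    return {vb: [label for _, label in sorted(evs)] for vb, evs in seen.items()}

# For vbucket 597 above this yields: state_replica, building_tap (the
# kill and the stream name both match), initial_stream, wait_persistence.
```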
[rebalance:debug,2014-08-19T16:51:43.406,ns_1@10.242.238.90:<0.29430.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:43.407,ns_1@10.242.238.90:<0.29430.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.25960.1>,#Ref<16550.0.2.47094>}]} [rebalance:info,2014-08-19T16:51:43.407,ns_1@10.242.238.90:<0.29430.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 596 [rebalance:debug,2014-08-19T16:51:43.407,ns_1@10.242.238.90:<0.29430.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.25960.1>,#Ref<16550.0.2.47094>}] [ns_server:debug,2014-08-19T16:51:43.408,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29432.0> (ok) [ns_server:debug,2014-08-19T16:51:43.408,ns_1@10.242.238.90:<0.29430.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:43.409,ns_1@10.242.238.90:<0.29433.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 596 [ns_server:debug,2014-08-19T16:51:43.429,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 597. Nacking mccouch update. [views:debug,2014-08-19T16:51:43.429,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/597. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:43.429,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",597,replica,0} [ns_server:debug,2014-08-19T16:51:43.430,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,413,958,647,698,387,1009,996,749,685,621,374,983,736, 672,608,425,361,970,723,659,412,348,957,710,646,399,1021,944,761,697,633,386, 1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,411,347,956,709, 645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981,734,670,606, 423,359,968,721,657,410,346,955,708,644,397,1019,942,759,695,631,384,1006, 993,746,682,618,371,980,733,669,605,422,358,967,720,656,409,345,954,707,643, 396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421, 357,966,719,655,408,344,953,706,642,395,1017,940,757,693,629,382,1004,991, 744,680,616,369,978,731,667,603,420,356,965,718,654,407,343,952,705,641,394, 1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355, 964,717,653,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989,742, 678,614,367,976,729,665,601,418,354,963,716,652,405,950,767,703,639,392,1014, 754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974, 727,663,599,416,352,961,714,650,403,948,765,701,637,390,1012,999,752,688,624, 377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,402,947,764,700, 636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661,597,414,350, 959,712,648,401,1023,946,763,699,635,388,1010,997,686,375,737,609,426,971, 660,349,711,400,1022,945,762,634] [ns_server:info,2014-08-19T16:51:43.476,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 595 state to replica [ns_server:info,2014-08-19T16:51:43.481,ns_1@10.242.238.90:<0.29438.0>:ebucketmigrator_srv:init:544]Setting 
{"10.242.238.90",11209} vbucket 595 to state replica [views:debug,2014-08-19T16:51:43.496,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/597. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:43.496,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",597,replica,0} [ns_server:debug,2014-08-19T16:51:43.499,ns_1@10.242.238.90:<0.29438.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_595_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:43.500,ns_1@10.242.238.90:<0.29438.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[595]}, {checkpoints,[{595,0}]}, {name,<<"replication_building_595_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[595]}, {takeover,false}, {suffix,"building_595_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",595,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:43.501,ns_1@10.242.238.90:<0.29438.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29439.0> [rebalance:debug,2014-08-19T16:51:43.501,ns_1@10.242.238.90:<0.29438.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:43.502,ns_1@10.242.238.90:<0.29438.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26011.1>,#Ref<16550.0.2.47340>}]} [rebalance:info,2014-08-19T16:51:43.502,ns_1@10.242.238.90:<0.29438.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 595 [rebalance:debug,2014-08-19T16:51:43.502,ns_1@10.242.238.90:<0.29438.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26011.1>,#Ref<16550.0.2.47340>}] [ns_server:debug,2014-08-19T16:51:43.503,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29440.0> (ok) [ns_server:debug,2014-08-19T16:51:43.503,ns_1@10.242.238.90:<0.29438.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:43.504,ns_1@10.242.238.90:<0.29441.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 595 [ns_server:info,2014-08-19T16:51:43.571,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 594 state to replica [ns_server:info,2014-08-19T16:51:43.575,ns_1@10.242.238.90:<0.29452.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 594 to state replica [ns_server:debug,2014-08-19T16:51:43.595,ns_1@10.242.238.90:<0.29452.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_594_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:43.596,ns_1@10.242.238.90:<0.29452.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[594]}, {checkpoints,[{594,0}]}, {name,<<"replication_building_594_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[594]}, {takeover,false}, {suffix,"building_594_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",594,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} 
[rebalance:debug,2014-08-19T16:51:43.597,ns_1@10.242.238.90:<0.29452.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29459.0> [rebalance:debug,2014-08-19T16:51:43.597,ns_1@10.242.238.90:<0.29452.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:43.598,ns_1@10.242.238.90:<0.29452.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26062.1>,#Ref<16550.0.2.47596>}]} [rebalance:info,2014-08-19T16:51:43.598,ns_1@10.242.238.90:<0.29452.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 594 [rebalance:debug,2014-08-19T16:51:43.598,ns_1@10.242.238.90:<0.29452.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26062.1>,#Ref<16550.0.2.47596>}] [ns_server:debug,2014-08-19T16:51:43.598,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29460.0> (ok) [ns_server:debug,2014-08-19T16:51:43.599,ns_1@10.242.238.90:<0.29452.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:43.600,ns_1@10.242.238.90:<0.29461.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 594 [ns_server:debug,2014-08-19T16:51:43.647,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 596. Nacking mccouch update. [views:debug,2014-08-19T16:51:43.647,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/596. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:43.647,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",596,pending,0} [ns_server:debug,2014-08-19T16:51:43.648,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,996,749,685,621,374,983, 736,672,608,425,361,970,723,659,412,348,957,710,646,399,1021,944,761,697,633, 386,1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,411,347,956, 709,645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981,734,670, 606,423,359,968,721,657,410,346,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,409,345,954,707, 643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604, 421,357,966,719,655,408,344,953,706,642,395,1017,940,757,693,629,382,1004, 991,744,680,616,369,978,731,667,603,420,356,965,718,654,407,343,952,705,641, 394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419, 355,964,717,653,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989, 742,678,614,367,976,729,665,601,418,354,963,716,652,405,950,767,703,639,392, 1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962, 715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612, 365,974,727,663,599,416,352,961,714,650,403,948,765,701,637,390,1012,999,752, 688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,402,947, 764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661,597, 414,350,959,712,648,401,1023,946,763,699,635,388,1010,997,686,375,737,609, 426,971,660,349,711,400,1022,945,762,634] [ns_server:info,2014-08-19T16:51:43.667,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 593 state 
to replica [ns_server:info,2014-08-19T16:51:43.671,ns_1@10.242.238.90:<0.29464.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 593 to state replica [ns_server:debug,2014-08-19T16:51:43.689,ns_1@10.242.238.90:<0.29464.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_593_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:43.690,ns_1@10.242.238.90:<0.29464.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[593]}, {checkpoints,[{593,0}]}, {name,<<"replication_building_593_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[593]}, {takeover,false}, {suffix,"building_593_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",593,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:43.691,ns_1@10.242.238.90:<0.29464.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29465.0> [rebalance:debug,2014-08-19T16:51:43.691,ns_1@10.242.238.90:<0.29464.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:43.692,ns_1@10.242.238.90:<0.29464.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26113.1>,#Ref<16550.0.2.47854>}]} [rebalance:info,2014-08-19T16:51:43.692,ns_1@10.242.238.90:<0.29464.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 593 [rebalance:debug,2014-08-19T16:51:43.692,ns_1@10.242.238.90:<0.29464.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26113.1>,#Ref<16550.0.2.47854>}] [ns_server:debug,2014-08-19T16:51:43.693,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29466.0> (ok) [ns_server:debug,2014-08-19T16:51:43.693,ns_1@10.242.238.90:<0.29464.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:43.694,ns_1@10.242.238.90:<0.29467.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 593 [views:debug,2014-08-19T16:51:43.714,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/596. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:43.714,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",596,pending,0} [ns_server:info,2014-08-19T16:51:43.762,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 592 state to replica [ns_server:info,2014-08-19T16:51:43.766,ns_1@10.242.238.90:<0.29470.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 592 to state replica [views:debug,2014-08-19T16:51:43.781,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/597. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:43.781,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",597,pending,0} [ns_server:debug,2014-08-19T16:51:43.785,ns_1@10.242.238.90:<0.29470.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_592_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:43.786,ns_1@10.242.238.90:<0.29470.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[592]}, {checkpoints,[{592,0}]}, {name,<<"replication_building_592_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[592]}, {takeover,false}, {suffix,"building_592_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",592,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:43.787,ns_1@10.242.238.90:<0.29470.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29471.0> [rebalance:debug,2014-08-19T16:51:43.787,ns_1@10.242.238.90:<0.29470.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:43.787,ns_1@10.242.238.90:<0.29470.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26164.1>,#Ref<16550.0.2.48110>}]} [rebalance:info,2014-08-19T16:51:43.788,ns_1@10.242.238.90:<0.29470.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 592 [rebalance:debug,2014-08-19T16:51:43.788,ns_1@10.242.238.90:<0.29470.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26164.1>,#Ref<16550.0.2.48110>}] [ns_server:debug,2014-08-19T16:51:43.788,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29472.0> (ok) [ns_server:debug,2014-08-19T16:51:43.788,ns_1@10.242.238.90:<0.29470.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:43.790,ns_1@10.242.238.90:<0.29473.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 592 [ns_server:info,2014-08-19T16:51:43.857,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 591 state to replica [ns_server:info,2014-08-19T16:51:43.862,ns_1@10.242.238.90:<0.29490.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 591 to state replica [ns_server:debug,2014-08-19T16:51:43.865,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 595. Nacking mccouch update. [views:debug,2014-08-19T16:51:43.865,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/595. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:43.865,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",595,pending,0} [ns_server:debug,2014-08-19T16:51:43.866,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,996,749,685,621,374,983, 736,672,608,425,361,970,723,659,595,412,348,957,710,646,399,1021,944,761,697, 633,386,1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,411,347, 956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981,734, 670,606,423,359,968,721,657,410,346,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,409,345,954,707, 643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604, 421,357,966,719,655,408,344,953,706,642,395,1017,940,757,693,629,382,1004, 991,744,680,616,369,978,731,667,603,420,356,965,718,654,407,343,952,705,641, 394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419, 355,964,717,653,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989, 742,678,614,367,976,729,665,601,418,354,963,716,652,405,950,767,703,639,392, 1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962, 715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612, 365,974,727,663,599,416,352,961,714,650,403,948,765,701,637,390,1012,999,752, 688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,402,947, 764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661,597, 414,350,959,712,648,401,1023,946,763,699,635,388,1010,997,686,375,737,609, 426,971,660,349,711,400,1022,945,762,634] [ns_server:debug,2014-08-19T16:51:43.880,ns_1@10.242.238.90:<0.29490.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_591_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:43.881,ns_1@10.242.238.90:<0.29490.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[591]}, {checkpoints,[{591,0}]}, {name,<<"replication_building_591_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[591]}, {takeover,false}, {suffix,"building_591_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",591,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:43.882,ns_1@10.242.238.90:<0.29490.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29491.0> [rebalance:debug,2014-08-19T16:51:43.882,ns_1@10.242.238.90:<0.29490.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:43.883,ns_1@10.242.238.90:<0.29490.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26215.1>,#Ref<16550.0.2.48355>}]} [rebalance:info,2014-08-19T16:51:43.883,ns_1@10.242.238.90:<0.29490.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 591 [rebalance:debug,2014-08-19T16:51:43.883,ns_1@10.242.238.90:<0.29490.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26215.1>,#Ref<16550.0.2.48355>}] [ns_server:debug,2014-08-19T16:51:43.884,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29492.0> (ok) 
[ns_server:debug,2014-08-19T16:51:43.884,ns_1@10.242.238.90:<0.29490.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:43.885,ns_1@10.242.238.90:<0.29493.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 591 [views:debug,2014-08-19T16:51:43.898,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/595. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:43.899,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",595,pending,0} [ns_server:info,2014-08-19T16:51:43.952,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 590 state to replica [ns_server:info,2014-08-19T16:51:43.956,ns_1@10.242.238.90:<0.29510.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 590 to state replica [ns_server:debug,2014-08-19T16:51:43.975,ns_1@10.242.238.90:<0.29510.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_590_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:43.977,ns_1@10.242.238.90:<0.29510.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[590]}, {checkpoints,[{590,0}]}, {name,<<"replication_building_590_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[590]}, {takeover,false}, {suffix,"building_590_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",590,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:43.978,ns_1@10.242.238.90:<0.29510.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29511.0> [rebalance:debug,2014-08-19T16:51:43.978,ns_1@10.242.238.90:<0.29510.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:43.978,ns_1@10.242.238.90:<0.29510.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26266.1>,#Ref<16550.0.2.48610>}]} [rebalance:info,2014-08-19T16:51:43.978,ns_1@10.242.238.90:<0.29510.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 590 [rebalance:debug,2014-08-19T16:51:43.979,ns_1@10.242.238.90:<0.29510.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26266.1>,#Ref<16550.0.2.48610>}] [ns_server:debug,2014-08-19T16:51:43.979,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29512.0> (ok) [ns_server:debug,2014-08-19T16:51:43.979,ns_1@10.242.238.90:<0.29510.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:43.980,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 593. Nacking mccouch update. [views:debug,2014-08-19T16:51:43.980,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/593. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:43.981,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",593,pending,0} [rebalance:debug,2014-08-19T16:51:43.981,ns_1@10.242.238.90:<0.29513.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 590 [ns_server:debug,2014-08-19T16:51:43.981,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,996,749,685,621,374,983, 736,672,608,425,361,970,723,659,595,412,348,957,710,646,399,1021,944,761,697, 633,386,1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,411,347, 956,709,645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981,734, 670,606,423,359,968,721,657,593,410,346,955,708,644,397,1019,942,759,695,631, 384,1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,409,345,954, 707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668, 604,421,357,966,719,655,408,344,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,731,667,603,420,356,965,718,654,407,343,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,406,342,951,704,640,393,1015,938,755,691,627,380,1002, 989,742,678,614,367,976,729,665,601,418,354,963,716,652,405,950,767,703,639, 392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353, 962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676, 612,365,974,727,663,599,416,352,961,714,650,403,948,765,701,637,390,1012,999, 752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,402, 947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661, 597,414,350,959,712,648,401,1023,946,763,699,635,388,1010,997,686,375,737, 609,426,971,660,349,711,400,1022,945,762,634] [ns_server:info,2014-08-19T16:51:44.049,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 589 state to replica [ns_server:info,2014-08-19T16:51:44.053,ns_1@10.242.238.90:<0.29516.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 589 to state replica [views:debug,2014-08-19T16:51:44.064,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/593. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:44.065,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",593,pending,0} [rebalance:debug,2014-08-19T16:51:44.066,ns_1@10.242.238.90:<0.29493.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:44.066,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29493.0> (ok) [rebalance:debug,2014-08-19T16:51:44.069,ns_1@10.242.238.90:<0.29517.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 591 [ns_server:debug,2014-08-19T16:51:44.072,ns_1@10.242.238.90:<0.29516.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_589_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:44.073,ns_1@10.242.238.90:<0.29516.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[589]}, {checkpoints,[{589,0}]}, {name,<<"replication_building_589_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[589]}, {takeover,false}, {suffix,"building_589_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",589,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:44.074,ns_1@10.242.238.90:<0.29516.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29520.0> [rebalance:debug,2014-08-19T16:51:44.074,ns_1@10.242.238.90:<0.29516.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:44.074,ns_1@10.242.238.90:<0.29516.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26317.1>,#Ref<16550.0.2.48865>}]} [rebalance:info,2014-08-19T16:51:44.074,ns_1@10.242.238.90:<0.29516.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 589 [rebalance:debug,2014-08-19T16:51:44.075,ns_1@10.242.238.90:<0.29516.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26317.1>,#Ref<16550.0.2.48865>}] [ns_server:debug,2014-08-19T16:51:44.076,ns_1@10.242.238.90:<0.29516.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:44.077,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29521.0> (ok) [rebalance:debug,2014-08-19T16:51:44.078,ns_1@10.242.238.90:<0.29522.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 589 [ns_server:info,2014-08-19T16:51:44.146,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 588 state to replica [ns_server:info,2014-08-19T16:51:44.150,ns_1@10.242.238.90:<0.29539.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 588 to state replica [ns_server:debug,2014-08-19T16:51:44.169,ns_1@10.242.238.90:<0.29539.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_588_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:44.171,ns_1@10.242.238.90:<0.29539.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[588]}, {checkpoints,[{588,0}]}, {name,<<"replication_building_588_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[588]}, {takeover,false}, {suffix,"building_588_'ns_1@10.242.238.90'"}, 
{note_tap_stats,{replica_building,"default",588,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:44.171,ns_1@10.242.238.90:<0.29539.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29540.0> [rebalance:debug,2014-08-19T16:51:44.173,ns_1@10.242.238.90:<0.29539.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:44.173,ns_1@10.242.238.90:<0.29539.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26371.1>,#Ref<16550.0.2.49168>}]} [rebalance:info,2014-08-19T16:51:44.173,ns_1@10.242.238.90:<0.29539.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 588 [rebalance:debug,2014-08-19T16:51:44.174,ns_1@10.242.238.90:<0.29539.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26371.1>,#Ref<16550.0.2.49168>}] [ns_server:debug,2014-08-19T16:51:44.174,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29541.0> (ok) [ns_server:debug,2014-08-19T16:51:44.174,ns_1@10.242.238.90:<0.29539.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:44.176,ns_1@10.242.238.90:<0.29542.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 588 [ns_server:debug,2014-08-19T16:51:44.198,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 591. Nacking mccouch update. [views:debug,2014-08-19T16:51:44.198,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/591. 
Updated state: pending (1) [ns_server:debug,2014-08-19T16:51:44.198,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",591,pending,1} [ns_server:debug,2014-08-19T16:51:44.199,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,736,672,608, 425,361,970,723,659,595,412,348,957,710,646,399,1021,944,761,697,633,386, 1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,411,347,956,709, 645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981,734,670,606, 423,359,968,721,657,593,410,346,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,409,345,954,707, 643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604, 421,357,966,719,655,591,408,344,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,731,667,603,420,356,965,718,654,407,343,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,406,342,951,704,640,393,1015,938,755,691,627,380,1002, 989,742,678,614,367,976,729,665,601,418,354,963,716,652,405,950,767,703,639, 392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353, 962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676, 612,365,974,727,663,599,416,352,961,714,650,403,948,765,701,637,390,1012,999, 752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,402, 947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661, 597,414,350,959,712,648,401,1023,946,763,699,635,388,1010,997,686,375,737, 609,426,971,660,349,711,400,1022,945,762,634,996,685,374] [ns_server:info,2014-08-19T16:51:44.242,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 587 state to replica [ns_server:info,2014-08-19T16:51:44.246,ns_1@10.242.238.90:<0.29545.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 587 to state replica [views:debug,2014-08-19T16:51:44.265,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/591. 
Updated state: pending (1) [ns_server:debug,2014-08-19T16:51:44.265,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",591,pending,1} [ns_server:debug,2014-08-19T16:51:44.265,ns_1@10.242.238.90:<0.29545.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_587_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:44.267,ns_1@10.242.238.90:<0.29545.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[587]}, {checkpoints,[{587,0}]}, {name,<<"replication_building_587_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[587]}, {takeover,false}, {suffix,"building_587_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",587,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:44.267,ns_1@10.242.238.90:<0.29545.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29546.0> [rebalance:debug,2014-08-19T16:51:44.267,ns_1@10.242.238.90:<0.29545.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:44.268,ns_1@10.242.238.90:<0.29545.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26425.1>,#Ref<16550.0.2.49457>}]} [rebalance:info,2014-08-19T16:51:44.268,ns_1@10.242.238.90:<0.29545.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 587 [rebalance:debug,2014-08-19T16:51:44.268,ns_1@10.242.238.90:<0.29545.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26425.1>,#Ref<16550.0.2.49457>}] [ns_server:debug,2014-08-19T16:51:44.269,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29547.0> (ok) [ns_server:debug,2014-08-19T16:51:44.269,ns_1@10.242.238.90:<0.29545.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:44.270,ns_1@10.242.238.90:<0.29548.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 587 [ns_server:info,2014-08-19T16:51:44.337,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 586 state to replica [ns_server:info,2014-08-19T16:51:44.342,ns_1@10.242.238.90:<0.29565.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 586 to state replica [ns_server:debug,2014-08-19T16:51:44.361,ns_1@10.242.238.90:<0.29565.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_586_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:44.363,ns_1@10.242.238.90:<0.29565.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[586]}, {checkpoints,[{586,0}]}, {name,<<"replication_building_586_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[586]}, {takeover,false}, {suffix,"building_586_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",586,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:44.363,ns_1@10.242.238.90:<0.29565.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29566.0> [rebalance:debug,2014-08-19T16:51:44.364,ns_1@10.242.238.90:<0.29565.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:51:44.364,ns_1@10.242.238.90:<0.29565.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26479.1>,#Ref<16550.0.2.49736>}]} [rebalance:info,2014-08-19T16:51:44.365,ns_1@10.242.238.90:<0.29565.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 586 [rebalance:debug,2014-08-19T16:51:44.365,ns_1@10.242.238.90:<0.29565.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26479.1>,#Ref<16550.0.2.49736>}] [ns_server:debug,2014-08-19T16:51:44.366,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29567.0> (ok) [ns_server:debug,2014-08-19T16:51:44.366,ns_1@10.242.238.90:<0.29565.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:44.367,ns_1@10.242.238.90:<0.29568.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 586 [ns_server:debug,2014-08-19T16:51:44.415,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 589. Nacking mccouch update. [views:debug,2014-08-19T16:51:44.415,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/589. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:44.415,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",589,pending,0} [ns_server:debug,2014-08-19T16:51:44.416,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,736,672,608, 425,361,970,723,659,595,412,348,957,710,646,399,1021,944,761,697,633,386, 1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,411,347,956,709, 645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981,734,670,606, 423,359,968,721,657,593,410,346,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,409,345,954,707, 643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604, 421,357,966,719,655,591,408,344,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,731,667,603,420,356,965,718,654,407,343,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,589,406,342,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,601,418,354,963,716,652,405,950,767,703, 639,392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417, 353,962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740, 676,612,365,974,727,663,599,416,352,961,714,650,403,948,765,701,637,390,1012, 999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649, 402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725, 661,597,414,350,959,712,648,401,1023,946,763,699,635,388,1010,997,686,375, 737,609,426,971,660,349,711,400,1022,945,762,634,996,685,374] [ns_server:info,2014-08-19T16:51:44.434,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 585 state to replica [ns_server:info,2014-08-19T16:51:44.439,ns_1@10.242.238.90:<0.29571.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 585 to state replica 
[ns_server:debug,2014-08-19T16:51:44.459,ns_1@10.242.238.90:<0.29571.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_585_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:44.460,ns_1@10.242.238.90:<0.29571.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[585]}, {checkpoints,[{585,0}]}, {name,<<"replication_building_585_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[585]}, {takeover,false}, {suffix,"building_585_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",585,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:44.461,ns_1@10.242.238.90:<0.29571.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29572.0> [rebalance:debug,2014-08-19T16:51:44.461,ns_1@10.242.238.90:<0.29571.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:44.461,ns_1@10.242.238.90:<0.29571.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26531.1>,#Ref<16550.0.2.50039>}]} [rebalance:info,2014-08-19T16:51:44.461,ns_1@10.242.238.90:<0.29571.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 585 [rebalance:debug,2014-08-19T16:51:44.462,ns_1@10.242.238.90:<0.29571.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26531.1>,#Ref<16550.0.2.50039>}] [ns_server:debug,2014-08-19T16:51:44.462,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29573.0> (ok) [ns_server:debug,2014-08-19T16:51:44.462,ns_1@10.242.238.90:<0.29571.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:44.464,ns_1@10.242.238.90:<0.29574.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 585 [views:debug,2014-08-19T16:51:44.482,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/589. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:44.483,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",589,pending,0} [ns_server:info,2014-08-19T16:51:44.539,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 584 state to replica [ns_server:info,2014-08-19T16:51:44.543,ns_1@10.242.238.90:<0.29591.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 584 to state replica [ns_server:debug,2014-08-19T16:51:44.562,ns_1@10.242.238.90:<0.29591.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_584_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:44.564,ns_1@10.242.238.90:<0.29591.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[584]}, {checkpoints,[{584,0}]}, {name,<<"replication_building_584_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[584]}, {takeover,false}, {suffix,"building_584_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",584,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:44.565,ns_1@10.242.238.90:<0.29591.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29592.0> [rebalance:debug,2014-08-19T16:51:44.565,ns_1@10.242.238.90:<0.29591.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:44.565,ns_1@10.242.238.90:<0.29591.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26582.1>,#Ref<16550.0.2.50296>}]} [rebalance:info,2014-08-19T16:51:44.565,ns_1@10.242.238.90:<0.29591.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 584 [rebalance:debug,2014-08-19T16:51:44.566,ns_1@10.242.238.90:<0.29591.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26582.1>,#Ref<16550.0.2.50296>}] [ns_server:debug,2014-08-19T16:51:44.566,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29593.0> (ok) [ns_server:debug,2014-08-19T16:51:44.566,ns_1@10.242.238.90:<0.29591.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:44.568,ns_1@10.242.238.90:<0.29594.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 584 [ns_server:debug,2014-08-19T16:51:44.599,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 594. Nacking mccouch update. [views:debug,2014-08-19T16:51:44.599,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/594. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:44.600,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",594,pending,0} [ns_server:debug,2014-08-19T16:51:44.600,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,736,672,608, 425,361,970,723,659,595,412,348,957,710,646,399,1021,944,761,697,633,386, 1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,594,411,347,956, 709,645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981,734,670, 606,423,359,968,721,657,593,410,346,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,409,345,954,707, 643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604, 421,357,966,719,655,591,408,344,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,731,667,603,420,356,965,718,654,407,343,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,589,406,342,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,601,418,354,963,716,652,405,950,767,703, 639,392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417, 353,962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740, 676,612,365,974,727,663,599,416,352,961,714,650,403,948,765,701,637,390,1012, 999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649, 402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725, 661,597,414,350,959,712,648,401,1023,946,763,699,635,388,1010,997,686,375, 737,609,426,971,660,349,711,400,1022,945,762,634,996,685,374] [ns_server:info,2014-08-19T16:51:44.636,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 583 state to replica [ns_server:info,2014-08-19T16:51:44.640,ns_1@10.242.238.90:<0.29597.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 583 to state replica [ns_server:debug,2014-08-19T16:51:44.659,ns_1@10.242.238.90:<0.29597.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_583_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:44.661,ns_1@10.242.238.90:<0.29597.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[583]}, {checkpoints,[{583,0}]}, {name,<<"replication_building_583_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[583]}, {takeover,false}, {suffix,"building_583_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",583,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:44.661,ns_1@10.242.238.90:<0.29597.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29598.0> [rebalance:debug,2014-08-19T16:51:44.662,ns_1@10.242.238.90:<0.29597.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:44.662,ns_1@10.242.238.90:<0.29597.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26633.1>,#Ref<16550.0.2.50542>}]} [rebalance:info,2014-08-19T16:51:44.662,ns_1@10.242.238.90:<0.29597.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 583 
[rebalance:debug,2014-08-19T16:51:44.662,ns_1@10.242.238.90:<0.29597.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26633.1>,#Ref<16550.0.2.50542>}] [ns_server:debug,2014-08-19T16:51:44.663,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29599.0> (ok) [ns_server:debug,2014-08-19T16:51:44.663,ns_1@10.242.238.90:<0.29597.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:44.665,ns_1@10.242.238.90:<0.29600.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 583 [views:debug,2014-08-19T16:51:44.666,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/594. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:44.667,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",594,pending,0} [ns_server:info,2014-08-19T16:51:44.732,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 582 state to replica [ns_server:info,2014-08-19T16:51:44.736,ns_1@10.242.238.90:<0.29617.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 582 to state replica [ns_server:debug,2014-08-19T16:51:44.755,ns_1@10.242.238.90:<0.29617.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_582_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:44.756,ns_1@10.242.238.90:<0.29617.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[582]}, {checkpoints,[{582,0}]}, {name,<<"replication_building_582_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[582]}, {takeover,false}, {suffix,"building_582_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",582,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:44.757,ns_1@10.242.238.90:<0.29617.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29618.0> [rebalance:debug,2014-08-19T16:51:44.757,ns_1@10.242.238.90:<0.29617.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:44.758,ns_1@10.242.238.90:<0.29617.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26684.1>,#Ref<16550.0.2.50787>}]} [rebalance:info,2014-08-19T16:51:44.758,ns_1@10.242.238.90:<0.29617.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 582 [rebalance:debug,2014-08-19T16:51:44.758,ns_1@10.242.238.90:<0.29617.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26684.1>,#Ref<16550.0.2.50787>}] [ns_server:debug,2014-08-19T16:51:44.759,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29619.0> (ok) [ns_server:debug,2014-08-19T16:51:44.759,ns_1@10.242.238.90:<0.29617.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:44.760,ns_1@10.242.238.90:<0.29620.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 582 [ns_server:debug,2014-08-19T16:51:44.792,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 592. Nacking mccouch update. 
[views:debug,2014-08-19T16:51:44.792,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/592. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:44.792,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",592,pending,0} [ns_server:debug,2014-08-19T16:51:44.793,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,736,672,608, 425,361,970,723,659,595,412,348,957,710,646,399,1021,944,761,697,633,386, 1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,594,411,347,956, 709,645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981,734,670, 606,423,359,968,721,657,593,410,346,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,592,409,345,954, 707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668, 604,421,357,966,719,655,591,408,344,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,731,667,603,420,356,965,718,654,407,343,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,589,406,342,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,601,418,354,963,716,652,405,950,767,703, 639,392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417, 353,962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740, 676,612,365,974,727,663,599,416,352,961,714,650,403,948,765,701,637,390,1012, 999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649, 402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725, 661,597,414,350,959,712,648,401,1023,946,763,699,635,388,1010,997,686,375, 737,609,426,971,660,349,711,400,1022,945,762,634,996,685,374] [ns_server:info,2014-08-19T16:51:44.827,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 581 state to replica [ns_server:info,2014-08-19T16:51:44.831,ns_1@10.242.238.90:<0.29623.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 581 to state replica [ns_server:debug,2014-08-19T16:51:44.850,ns_1@10.242.238.90:<0.29623.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_581_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:44.852,ns_1@10.242.238.90:<0.29623.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[581]}, {checkpoints,[{581,0}]}, {name,<<"replication_building_581_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[581]}, {takeover,false}, {suffix,"building_581_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",581,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:44.852,ns_1@10.242.238.90:<0.29623.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29624.0> [rebalance:debug,2014-08-19T16:51:44.852,ns_1@10.242.238.90:<0.29623.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:44.853,ns_1@10.242.238.90:<0.29623.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26735.1>,#Ref<16550.0.2.51043>}]} 
[rebalance:info,2014-08-19T16:51:44.853,ns_1@10.242.238.90:<0.29623.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 581 [rebalance:debug,2014-08-19T16:51:44.853,ns_1@10.242.238.90:<0.29623.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26735.1>,#Ref<16550.0.2.51043>}] [ns_server:debug,2014-08-19T16:51:44.854,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29625.0> (ok) [ns_server:debug,2014-08-19T16:51:44.854,ns_1@10.242.238.90:<0.29623.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:44.855,ns_1@10.242.238.90:<0.29626.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 581 [views:debug,2014-08-19T16:51:44.859,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/592. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:44.859,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",592,pending,0} [ns_server:info,2014-08-19T16:51:44.923,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 580 state to replica [ns_server:info,2014-08-19T16:51:44.928,ns_1@10.242.238.90:<0.29643.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 580 to state replica [ns_server:debug,2014-08-19T16:51:44.941,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 590. Nacking mccouch update. [views:debug,2014-08-19T16:51:44.941,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/590. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:44.941,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",590,pending,0} [ns_server:debug,2014-08-19T16:51:44.942,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,736,672,608, 425,361,970,723,659,595,412,348,957,710,646,399,1021,944,761,697,633,386, 1008,995,748,684,620,373,982,735,671,607,424,360,969,722,658,594,411,347,956, 709,645,398,1020,943,760,696,632,385,1007,994,747,683,619,372,981,734,670, 606,423,359,968,721,657,593,410,346,955,708,644,397,1019,942,759,695,631,384, 1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,592,409,345,954, 707,643,396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668, 604,421,357,966,719,655,591,408,344,953,706,642,395,1017,940,757,693,629,382, 1004,991,744,680,616,369,978,731,667,603,420,356,965,718,654,590,407,343,952, 705,641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666, 602,419,355,964,717,653,589,406,342,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,601,418,354,963,716,652,405,950,767,703, 639,392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417, 353,962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740, 676,612,365,974,727,663,599,416,352,961,714,650,403,948,765,701,637,390,1012, 999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649, 402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725, 661,597,414,350,959,712,648,401,1023,946,763,699,635,388,1010,997,686,375, 737,609,426,971,660,349,711,400,1022,945,762,634,996,685,374] [ns_server:debug,2014-08-19T16:51:44.947,ns_1@10.242.238.90:<0.29643.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_580_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:44.948,ns_1@10.242.238.90:<0.29643.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[580]}, {checkpoints,[{580,0}]}, {name,<<"replication_building_580_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[580]}, {takeover,false}, {suffix,"building_580_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",580,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:44.949,ns_1@10.242.238.90:<0.29643.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29644.0> [rebalance:debug,2014-08-19T16:51:44.949,ns_1@10.242.238.90:<0.29643.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:44.949,ns_1@10.242.238.90:<0.29643.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26786.1>,#Ref<16550.0.2.51288>}]} [rebalance:info,2014-08-19T16:51:44.949,ns_1@10.242.238.90:<0.29643.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 580 [rebalance:debug,2014-08-19T16:51:44.950,ns_1@10.242.238.90:<0.29643.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26786.1>,#Ref<16550.0.2.51288>}] [ns_server:debug,2014-08-19T16:51:44.951,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29645.0> (ok) 
[ns_server:debug,2014-08-19T16:51:44.951,ns_1@10.242.238.90:<0.29643.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:44.952,ns_1@10.242.238.90:<0.29646.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 580 [views:debug,2014-08-19T16:51:44.975,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/590. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:44.976,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",590,pending,0} [ns_server:info,2014-08-19T16:51:45.019,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 579 state to replica [ns_server:info,2014-08-19T16:51:45.023,ns_1@10.242.238.90:<0.29649.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 579 to state replica [ns_server:debug,2014-08-19T16:51:45.042,ns_1@10.242.238.90:<0.29649.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_579_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:45.043,ns_1@10.242.238.90:<0.29649.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[579]}, {checkpoints,[{579,0}]}, {name,<<"replication_building_579_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[579]}, {takeover,false}, {suffix,"building_579_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",579,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:45.044,ns_1@10.242.238.90:<0.29649.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29664.0> [rebalance:debug,2014-08-19T16:51:45.044,ns_1@10.242.238.90:<0.29649.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:45.045,ns_1@10.242.238.90:<0.29649.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26837.1>,#Ref<16550.0.2.51542>}]} [rebalance:info,2014-08-19T16:51:45.045,ns_1@10.242.238.90:<0.29649.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 579 [rebalance:debug,2014-08-19T16:51:45.045,ns_1@10.242.238.90:<0.29649.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26837.1>,#Ref<16550.0.2.51542>}] [ns_server:debug,2014-08-19T16:51:45.046,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29665.0> (ok) [ns_server:debug,2014-08-19T16:51:45.046,ns_1@10.242.238.90:<0.29649.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:45.047,ns_1@10.242.238.90:<0.29666.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 579 [ns_server:debug,2014-08-19T16:51:45.083,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 588. Nacking mccouch update. [views:debug,2014-08-19T16:51:45.084,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/588. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:45.084,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",588,pending,0} [ns_server:debug,2014-08-19T16:51:45.085,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,970, 723,659,595,412,348,957,710,646,399,1021,944,761,697,633,386,1008,995,748, 684,620,373,982,735,671,607,424,360,969,722,658,594,411,347,956,709,645,398, 1020,943,760,696,632,385,1007,994,747,683,619,372,981,734,670,606,423,359, 968,721,657,593,410,346,955,708,644,397,1019,942,759,695,631,384,1006,993, 746,682,618,371,980,733,669,605,422,358,967,720,656,592,409,345,954,707,643, 396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421, 357,966,719,655,591,408,344,953,706,642,395,1017,940,757,693,629,382,1004, 991,744,680,616,369,978,731,667,603,420,356,965,718,654,590,407,343,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,589,406,342,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,601,418,354,963,716,652,588,405,950,767, 703,639,392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600, 417,353,962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987, 740,676,612,365,974,727,663,599,416,352,961,714,650,403,948,765,701,637,390, 1012,999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713, 649,402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972, 725,661,597,414,350,959,712,648,401,1023,946,763,699,635,388,1010,997,686, 375,737,609,426,971,660,349,711,400,1022,945,762,634,996,685,374,736,608,425] [ns_server:info,2014-08-19T16:51:45.115,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 578 state to replica [ns_server:info,2014-08-19T16:51:45.119,ns_1@10.242.238.90:<0.29669.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 578 to state replica [views:debug,2014-08-19T16:51:45.134,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/588. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:45.135,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",588,pending,0} [ns_server:debug,2014-08-19T16:51:45.138,ns_1@10.242.238.90:<0.29669.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_578_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:45.140,ns_1@10.242.238.90:<0.29669.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[578]}, {checkpoints,[{578,0}]}, {name,<<"replication_building_578_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[578]}, {takeover,false}, {suffix,"building_578_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",578,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:45.141,ns_1@10.242.238.90:<0.29669.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29670.0> [rebalance:debug,2014-08-19T16:51:45.141,ns_1@10.242.238.90:<0.29669.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:45.141,ns_1@10.242.238.90:<0.29669.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26893.1>,#Ref<16550.0.2.51825>}]} [rebalance:info,2014-08-19T16:51:45.142,ns_1@10.242.238.90:<0.29669.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 578 [rebalance:debug,2014-08-19T16:51:45.142,ns_1@10.242.238.90:<0.29669.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26893.1>,#Ref<16550.0.2.51825>}] [ns_server:debug,2014-08-19T16:51:45.142,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29671.0> (ok) [ns_server:debug,2014-08-19T16:51:45.143,ns_1@10.242.238.90:<0.29669.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:45.144,ns_1@10.242.238.90:<0.29672.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 578 [ns_server:info,2014-08-19T16:51:45.179,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 577 state to replica [ns_server:info,2014-08-19T16:51:45.184,ns_1@10.242.238.90:<0.29675.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 577 to state replica [ns_server:debug,2014-08-19T16:51:45.203,ns_1@10.242.238.90:<0.29675.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_577_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:45.204,ns_1@10.242.238.90:<0.29675.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[577]}, {checkpoints,[{577,0}]}, {name,<<"replication_building_577_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[577]}, {takeover,false}, {suffix,"building_577_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",577,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:45.205,ns_1@10.242.238.90:<0.29675.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29690.0> [rebalance:debug,2014-08-19T16:51:45.205,ns_1@10.242.238.90:<0.29675.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:51:45.205,ns_1@10.242.238.90:<0.29675.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26927.1>,#Ref<16550.0.2.51991>}]} [rebalance:info,2014-08-19T16:51:45.205,ns_1@10.242.238.90:<0.29675.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 577 [rebalance:debug,2014-08-19T16:51:45.206,ns_1@10.242.238.90:<0.29675.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26927.1>,#Ref<16550.0.2.51991>}] [ns_server:debug,2014-08-19T16:51:45.206,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29691.0> (ok) [ns_server:debug,2014-08-19T16:51:45.207,ns_1@10.242.238.90:<0.29675.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:45.207,ns_1@10.242.238.90:<0.29692.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 577 [ns_server:debug,2014-08-19T16:51:45.218,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 586. Nacking mccouch update. [views:debug,2014-08-19T16:51:45.218,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/586. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:45.218,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",586,pending,0} [ns_server:debug,2014-08-19T16:51:45.219,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,970, 723,659,595,412,348,957,710,646,399,1021,944,761,697,633,386,1008,995,748, 684,620,373,982,735,671,607,424,360,969,722,658,594,411,347,956,709,645,398, 1020,943,760,696,632,385,1007,994,747,683,619,372,981,734,670,606,423,359, 968,721,657,593,410,346,955,708,644,397,1019,942,759,695,631,384,1006,993, 746,682,618,371,980,733,669,605,422,358,967,720,656,592,409,345,954,707,643, 396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421, 357,966,719,655,591,408,344,953,706,642,395,1017,940,757,693,629,382,1004, 991,744,680,616,369,978,731,667,603,420,356,965,718,654,590,407,343,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,589,406,342,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,601,418,354,963,716,652,588,405,950,767, 703,639,392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600, 417,353,962,715,651,404,949,766,702,638,391,1013,753,689,625,378,1000,987, 740,676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637, 390,1012,999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960, 713,649,402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363, 972,725,661,597,414,350,959,712,648,401,1023,946,763,699,635,388,1010,997, 686,375,737,609,426,971,660,349,711,400,1022,945,762,634,996,685,374,736,608, 425] [ns_server:info,2014-08-19T16:51:45.243,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 576 state to replica [ns_server:info,2014-08-19T16:51:45.247,ns_1@10.242.238.90:<0.29695.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 576 to state replica 
[views:debug,2014-08-19T16:51:45.252,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/586. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:45.252,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",586,pending,0} [ns_server:debug,2014-08-19T16:51:45.266,ns_1@10.242.238.90:<0.29695.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_576_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:45.267,ns_1@10.242.238.90:<0.29695.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[576]}, {checkpoints,[{576,0}]}, {name,<<"replication_building_576_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[576]}, {takeover,false}, {suffix,"building_576_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",576,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:45.268,ns_1@10.242.238.90:<0.29695.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29696.0> [rebalance:debug,2014-08-19T16:51:45.268,ns_1@10.242.238.90:<0.29695.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:45.269,ns_1@10.242.238.90:<0.29695.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.26961.1>,#Ref<16550.0.2.52167>}]} [rebalance:info,2014-08-19T16:51:45.269,ns_1@10.242.238.90:<0.29695.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 576 [rebalance:debug,2014-08-19T16:51:45.269,ns_1@10.242.238.90:<0.29695.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.26961.1>,#Ref<16550.0.2.52167>}] [ns_server:debug,2014-08-19T16:51:45.270,ns_1@10.242.238.90:<0.29695.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:45.270,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29697.0> (ok) [rebalance:debug,2014-08-19T16:51:45.271,ns_1@10.242.238.90:<0.29698.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 576 [ns_server:debug,2014-08-19T16:51:45.335,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 587. Nacking mccouch update. [views:debug,2014-08-19T16:51:45.335,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/587. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:45.336,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",587,pending,0} [ns_server:debug,2014-08-19T16:51:45.336,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,970, 723,659,595,412,348,957,710,646,399,1021,944,761,697,633,386,1008,995,748, 684,620,373,982,735,671,607,424,360,969,722,658,594,411,347,956,709,645,398, 1020,943,760,696,632,385,1007,994,747,683,619,372,981,734,670,606,423,359, 968,721,657,593,410,346,955,708,644,397,1019,942,759,695,631,384,1006,993, 746,682,618,371,980,733,669,605,422,358,967,720,656,592,409,345,954,707,643, 396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421, 357,966,719,655,591,408,344,953,706,642,395,1017,940,757,693,629,382,1004, 991,744,680,616,369,978,731,667,603,420,356,965,718,654,590,407,343,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,589,406,342,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,601,418,354,963,716,652,588,405,950,767, 703,639,392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600, 417,353,962,715,651,587,404,949,766,702,638,391,1013,753,689,625,378,1000, 987,740,676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701, 637,390,1012,999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351, 960,713,649,402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610, 363,972,725,661,597,414,350,959,712,648,401,1023,946,763,699,635,388,1010, 997,686,375,737,609,426,971,660,349,711,400,1022,945,762,634,996,685,374,736, 608,425] [views:debug,2014-08-19T16:51:45.370,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/587. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:45.370,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",587,pending,0} [ns_server:debug,2014-08-19T16:51:45.492,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 585. Nacking mccouch update. [views:debug,2014-08-19T16:51:45.492,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/585. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:45.492,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",585,pending,0} [ns_server:debug,2014-08-19T16:51:45.493,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,970, 723,659,595,412,348,957,710,646,399,1021,944,761,697,633,386,1008,995,748, 684,620,373,982,735,671,607,424,360,969,722,658,594,411,347,956,709,645,398, 1020,943,760,696,632,385,1007,994,747,683,619,372,981,734,670,606,423,359, 968,721,657,593,410,346,955,708,644,397,1019,942,759,695,631,384,1006,993, 746,682,618,371,980,733,669,605,422,358,967,720,656,592,409,345,954,707,643, 396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421, 357,966,719,655,591,408,344,953,706,642,395,1017,940,757,693,629,382,1004, 991,744,680,616,369,978,731,667,603,420,356,965,718,654,590,407,343,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,589,406,342,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,601,418,354,963,716,652,588,405,950,767, 703,639,392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600, 417,353,962,715,651,587,404,949,766,702,638,391,1013,753,689,625,378,1000, 987,740,676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701, 637,390,1012,999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351, 960,713,649,585,402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674, 610,363,972,725,661,597,414,350,959,712,648,401,1023,946,763,699,635,388, 1010,997,686,375,737,609,426,971,660,349,711,400,1022,945,762,634,996,685, 374,736,608,425] [views:debug,2014-08-19T16:51:45.560,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/585. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:45.560,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",585,pending,0} [ns_server:debug,2014-08-19T16:51:45.693,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 583. Nacking mccouch update. [views:debug,2014-08-19T16:51:45.693,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/583. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:45.693,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",583,pending,0} [ns_server:debug,2014-08-19T16:51:45.694,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,970, 723,659,595,412,348,957,710,646,399,1021,944,761,697,633,386,1008,995,748, 684,620,373,982,735,671,607,424,360,969,722,658,594,411,347,956,709,645,398, 1020,943,760,696,632,385,1007,994,747,683,619,372,981,734,670,606,423,359, 968,721,657,593,410,346,955,708,644,397,1019,942,759,695,631,384,1006,993, 746,682,618,371,980,733,669,605,422,358,967,720,656,592,409,345,954,707,643, 396,1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421, 357,966,719,655,591,408,344,953,706,642,395,1017,940,757,693,629,382,1004, 991,744,680,616,369,978,731,667,603,420,356,965,718,654,590,407,343,952,705, 641,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,589,406,342,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,601,418,354,963,716,652,588,405,950,767, 703,639,392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600, 417,353,962,715,651,587,404,949,766,702,638,391,1013,753,689,625,378,1000, 987,740,676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701, 637,390,1012,999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351, 960,713,649,585,402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674, 610,363,972,725,661,597,414,350,959,712,648,401,1023,946,763,699,635,388, 1010,997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634,996, 685,374,736,608,425] [views:debug,2014-08-19T16:51:45.769,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/583. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:45.769,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",583,pending,0} [ns_server:debug,2014-08-19T16:51:45.910,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 581. Nacking mccouch update. [views:debug,2014-08-19T16:51:45.911,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/581. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:45.911,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",581,pending,0} [ns_server:debug,2014-08-19T16:51:45.911,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,723, 595,412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373, 982,735,671,607,424,360,969,722,658,594,411,347,956,709,645,581,398,1020,943, 760,696,632,385,1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657, 593,410,346,955,708,644,397,1019,942,759,695,631,384,1006,993,746,682,618, 371,980,733,669,605,422,358,967,720,656,592,409,345,954,707,643,396,1018,941, 758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655, 591,408,344,953,706,642,395,1017,940,757,693,629,382,1004,991,744,680,616, 369,978,731,667,603,420,356,965,718,654,590,407,343,952,705,641,394,1016,939, 756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653, 589,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,614, 367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,392,1014,754, 690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,587, 404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974, 727,663,599,416,352,961,714,650,586,403,948,765,701,637,390,1012,999,752,688, 624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,585,402,947, 764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661,597, 414,350,959,712,648,401,1023,946,763,699,635,388,1010,997,686,375,737,609, 426,971,660,349,711,583,400,1022,945,762,634,996,685,374,736,608,425,970,659, 348] [views:debug,2014-08-19T16:51:45.978,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/581. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:45.978,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",581,pending,0} [ns_server:debug,2014-08-19T16:51:46.120,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 579. Nacking mccouch update. [views:debug,2014-08-19T16:51:46.120,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/579. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:46.120,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",579,pending,0} [ns_server:debug,2014-08-19T16:51:46.121,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,723, 595,412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373, 982,735,671,607,424,360,969,722,658,594,411,347,956,709,645,581,398,1020,943, 760,696,632,385,1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657, 593,410,346,955,708,644,397,1019,942,759,695,631,384,1006,993,746,682,618, 371,980,733,669,605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018, 941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719, 655,591,408,344,953,706,642,395,1017,940,757,693,629,382,1004,991,744,680, 616,369,978,731,667,603,420,356,965,718,654,590,407,343,952,705,641,394,1016, 939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717, 653,589,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678, 614,367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,392,1014, 754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365, 974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,390,1012,999,752, 688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,585,402, 947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661, 597,414,350,959,712,648,401,1023,946,763,699,635,388,1010,997,686,375,737, 609,426,971,660,349,711,583,400,1022,945,762,634,996,685,374,736,608,425,970, 659,348] [views:debug,2014-08-19T16:51:46.187,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/579. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:46.187,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",579,pending,0} [ns_server:debug,2014-08-19T16:51:46.346,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 577. Nacking mccouch update. [views:debug,2014-08-19T16:51:46.346,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/577. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:46.346,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",577,pending,0} [ns_server:debug,2014-08-19T16:51:46.347,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,723, 595,412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373, 982,735,671,607,424,360,969,722,658,594,411,347,956,709,645,581,398,1020,943, 760,696,632,385,1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657, 593,410,346,955,708,644,397,1019,942,759,695,631,384,1006,993,746,682,618, 371,980,733,669,605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018, 941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719, 655,591,408,344,953,706,642,395,1017,940,757,693,629,382,1004,991,744,680, 616,369,978,731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394, 1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355, 964,717,653,589,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989, 742,678,614,367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639, 392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353, 962,715,651,587,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740, 676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,390, 1012,999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713, 649,585,402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363, 972,725,661,597,414,350,959,712,648,401,1023,946,763,699,635,388,1010,997, 686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634,996,685,374,736, 608,425,970,659,348] [views:debug,2014-08-19T16:51:46.396,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/577. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:46.396,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",577,pending,0} [ns_server:debug,2014-08-19T16:51:46.480,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 584. Nacking mccouch update. [views:debug,2014-08-19T16:51:46.480,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/584. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:46.480,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",584,pending,0} [ns_server:debug,2014-08-19T16:51:46.481,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,723, 595,412,957,710,646,399,1021,944,761,697,633,386,1008,995,748,684,620,373, 982,735,671,607,424,360,969,722,658,594,411,347,956,709,645,581,398,1020,943, 760,696,632,385,1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657, 593,410,346,955,708,644,397,1019,942,759,695,631,384,1006,993,746,682,618, 371,980,733,669,605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018, 941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719, 655,591,408,344,953,706,642,395,1017,940,757,693,629,382,1004,991,744,680, 616,369,978,731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394, 1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355, 964,717,653,589,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989, 742,678,614,367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639, 392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353, 962,715,651,587,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740, 676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,390, 1012,999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713, 649,585,402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363, 972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,388,1010, 997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634,996,685,374, 736,608,425,970,659,348] [views:debug,2014-08-19T16:51:46.514,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/584. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:46.514,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",584,pending,0} [ns_server:debug,2014-08-19T16:51:46.597,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 582. Nacking mccouch update. [views:debug,2014-08-19T16:51:46.598,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/582. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:46.598,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",582,pending,0} [ns_server:debug,2014-08-19T16:51:46.598,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,723, 595,412,957,710,646,582,399,1021,944,761,697,633,386,1008,995,748,684,620, 373,982,735,671,607,424,360,969,722,658,594,411,347,956,709,645,581,398,1020, 943,760,696,632,385,1007,994,747,683,619,372,981,734,670,606,423,359,968,721, 657,593,410,346,955,708,644,397,1019,942,759,695,631,384,1006,993,746,682, 618,371,980,733,669,605,422,358,967,720,656,592,409,345,954,707,643,579,396, 1018,941,758,694,630,383,1005,992,745,681,617,370,979,732,668,604,421,357, 966,719,655,591,408,344,953,706,642,395,1017,940,757,693,629,382,1004,991, 744,680,616,369,978,731,667,603,420,356,965,718,654,590,407,343,952,705,641, 577,394,1016,939,756,692,628,381,1003,990,743,679,615,368,977,730,666,602, 419,355,964,717,653,589,406,342,951,704,640,393,1015,938,755,691,627,380, 1002,989,742,678,614,367,976,729,665,601,418,354,963,716,652,588,405,950,767, 703,639,392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600, 417,353,962,715,651,587,404,949,766,702,638,391,1013,753,689,625,378,1000, 987,740,676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701, 637,390,1012,999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351, 960,713,649,585,402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674, 610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,388, 1010,997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634,996, 685,374,736,608,425,970,659,348] [views:debug,2014-08-19T16:51:46.648,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/582. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:46.648,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",582,pending,0} [ns_server:debug,2014-08-19T16:51:46.723,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 580. Nacking mccouch update. [views:debug,2014-08-19T16:51:46.723,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/580. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:46.723,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",580,pending,0} [ns_server:debug,2014-08-19T16:51:46.724,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,723, 595,412,957,646,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607, 424,360,969,722,658,594,411,347,956,709,645,581,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955, 708,644,580,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733, 669,605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694, 630,383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408, 344,953,706,642,395,1017,940,757,693,629,382,1004,991,744,680,616,369,978, 731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756, 692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589, 406,342,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,614,367, 976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,392,1014,754,690, 626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,587,404, 949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974,727, 663,599,416,352,961,714,650,586,403,948,765,701,637,390,1012,999,752,688,624, 377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,585,402,947,764, 700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661,597,414, 350,959,712,648,584,401,1023,946,763,699,635,388,1010,997,686,375,737,609, 426,971,660,349,711,583,400,1022,945,762,634,996,685,374,736,608,425,970,659, 348,710,582,399,1021] [views:debug,2014-08-19T16:51:46.782,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/580. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:46.782,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",580,pending,0} [ns_server:debug,2014-08-19T16:51:46.933,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 578. Nacking mccouch update. [views:debug,2014-08-19T16:51:46.933,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/578. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:46.933,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",578,pending,0} [ns_server:debug,2014-08-19T16:51:46.934,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,723, 595,412,957,646,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607, 424,360,969,722,658,594,411,347,956,709,645,581,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955, 708,644,580,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733, 669,605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694, 630,383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408, 344,953,706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369, 978,731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939, 756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653, 589,406,342,951,704,640,393,1015,938,755,691,627,380,1002,989,742,678,614, 367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,392,1014,754, 690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,587, 404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365,974, 727,663,599,416,352,961,714,650,586,403,948,765,701,637,390,1012,999,752,688, 624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,585,402,947, 764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661,597, 414,350,959,712,648,584,401,1023,946,763,699,635,388,1010,997,686,375,737, 609,426,971,660,349,711,583,400,1022,945,762,634,996,685,374,736,608,425,970, 659,348,710,582,399,1021] [ns_server:info,2014-08-19T16:51:46.956,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:51:46.984,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/578. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:46.984,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",578,pending,0} [ns_server:debug,2014-08-19T16:51:47.088,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 576. Nacking mccouch update. [views:debug,2014-08-19T16:51:47.088,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/576. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:47.088,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",576,pending,0} [ns_server:debug,2014-08-19T16:51:47.089,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,723, 595,412,957,646,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607, 424,360,969,722,658,594,411,347,956,709,645,581,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955, 708,644,580,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733, 669,605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694, 630,383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408, 344,953,706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369, 978,731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939, 756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653, 589,406,342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742,678, 614,367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,392,1014, 754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676,612,365, 974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,390,1012,999,752, 688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,585,402, 947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725,661, 597,414,350,959,712,648,584,401,1023,946,763,699,635,388,1010,997,686,375, 737,609,426,971,660,349,711,583,400,1022,945,762,634,996,685,374,736,608,425, 970,659,348,710,582,399,1021] [views:debug,2014-08-19T16:51:47.156,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/576. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:47.156,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",576,pending,0} [rebalance:debug,2014-08-19T16:51:47.157,ns_1@10.242.238.90:<0.29473.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:47.157,ns_1@10.242.238.90:<0.29467.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:47.157,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29473.0> (ok) [ns_server:debug,2014-08-19T16:51:47.157,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29467.0> (ok) [rebalance:debug,2014-08-19T16:51:47.161,ns_1@10.242.238.90:<0.29864.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 593 [rebalance:debug,2014-08-19T16:51:47.161,ns_1@10.242.238.90:<0.29867.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 592 [rebalance:debug,2014-08-19T16:51:47.294,ns_1@10.242.238.90:<0.29461.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:47.294,ns_1@10.242.238.90:<0.29441.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:47.294,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29461.0> (ok) [ns_server:debug,2014-08-19T16:51:47.294,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29441.0> (ok) [rebalance:debug,2014-08-19T16:51:47.298,ns_1@10.242.238.90:<0.29870.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 594 [rebalance:debug,2014-08-19T16:51:47.298,ns_1@10.242.238.90:<0.29871.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 595 [rebalance:debug,2014-08-19T16:51:47.432,ns_1@10.242.238.90:<0.29399.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:47.432,ns_1@10.242.238.90:<0.29433.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:47.433,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29399.0> (ok) [ns_server:debug,2014-08-19T16:51:47.433,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29433.0> (ok) [rebalance:debug,2014-08-19T16:51:47.435,ns_1@10.242.238.90:<0.29882.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 596 [rebalance:debug,2014-08-19T16:51:47.541,ns_1@10.242.238.90:<0.29698.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:47.541,ns_1@10.242.238.90:<0.29692.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:47.541,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29698.0> (ok) [ns_server:debug,2014-08-19T16:51:47.542,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29692.0> (ok) [rebalance:debug,2014-08-19T16:51:47.545,ns_1@10.242.238.90:<0.29885.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 577 [rebalance:debug,2014-08-19T16:51:47.545,ns_1@10.242.238.90:<0.29888.0>:janitor_agent:handle_call:793]Going to wait for 
persistence of checkpoint 1 in vbucket 576 [rebalance:debug,2014-08-19T16:51:47.683,ns_1@10.242.238.90:<0.29672.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:47.683,ns_1@10.242.238.90:<0.29666.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:47.683,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29672.0> (ok) [ns_server:debug,2014-08-19T16:51:47.683,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29666.0> (ok) [rebalance:debug,2014-08-19T16:51:47.687,ns_1@10.242.238.90:<0.29891.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 578 [rebalance:debug,2014-08-19T16:51:47.687,ns_1@10.242.238.90:<0.29894.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 579 [rebalance:debug,2014-08-19T16:51:47.826,ns_1@10.242.238.90:<0.29626.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:47.826,ns_1@10.242.238.90:<0.29646.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:47.826,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29626.0> (ok) [ns_server:debug,2014-08-19T16:51:47.826,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29646.0> (ok) [rebalance:debug,2014-08-19T16:51:47.830,ns_1@10.242.238.90:<0.29898.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 581 [rebalance:debug,2014-08-19T16:51:47.830,ns_1@10.242.238.90:<0.29899.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 580 [rebalance:debug,2014-08-19T16:51:47.909,ns_1@10.242.238.90:<0.29620.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:47.909,ns_1@10.242.238.90:<0.29600.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:47.910,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29620.0> (ok) [ns_server:debug,2014-08-19T16:51:47.910,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29600.0> (ok) [rebalance:debug,2014-08-19T16:51:47.914,ns_1@10.242.238.90:<0.29904.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 583 [rebalance:debug,2014-08-19T16:51:47.914,ns_1@10.242.238.90:<0.29905.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 582 [rebalance:debug,2014-08-19T16:51:48.017,ns_1@10.242.238.90:<0.29594.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.017,ns_1@10.242.238.90:<0.29574.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:48.017,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29594.0> (ok) [ns_server:debug,2014-08-19T16:51:48.018,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29574.0> (ok) [rebalance:debug,2014-08-19T16:51:48.021,ns_1@10.242.238.90:<0.29910.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 584 [rebalance:debug,2014-08-19T16:51:48.021,ns_1@10.242.238.90:<0.29912.0>:janitor_agent:handle_call:793]Going to wait for 
persistence of checkpoint 1 in vbucket 585 [rebalance:debug,2014-08-19T16:51:48.118,ns_1@10.242.238.90:<0.29548.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.118,ns_1@10.242.238.90:<0.29568.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:48.118,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29548.0> (ok) [ns_server:debug,2014-08-19T16:51:48.118,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29568.0> (ok) [rebalance:debug,2014-08-19T16:51:48.129,ns_1@10.242.238.90:<0.29916.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 586 [rebalance:debug,2014-08-19T16:51:48.129,ns_1@10.242.238.90:<0.29919.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 587 [ns_server:debug,2014-08-19T16:51:48.196,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.205,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8821 us [ns_server:debug,2014-08-19T16:51:48.206,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:48.206,ns_1@10.242.238.90:<0.29542.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.206,ns_1@10.242.238.90:<0.29522.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:48.206,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.207,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29542.0> (ok) [ns_server:debug,2014-08-19T16:51:48.207,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29522.0> (ok) [ns_server:debug,2014-08-19T16:51:48.207,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{843, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:48.211,ns_1@10.242.238.90:<0.29923.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 588 [rebalance:debug,2014-08-19T16:51:48.211,ns_1@10.242.238.90:<0.29924.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 589 [ns_server:debug,2014-08-19T16:51:48.253,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.256,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:48.256,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3399 us [ns_server:debug,2014-08-19T16:51:48.257,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.257,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{846, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:48.272,ns_1@10.242.238.90:<0.29513.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.272,ns_1@10.242.238.90:<0.29517.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:48.272,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29513.0> (ok) [ns_server:debug,2014-08-19T16:51:48.272,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29517.0> (ok) [ns_server:debug,2014-08-19T16:51:48.274,ns_1@10.242.238.90:<0.29931.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 591) [ns_server:debug,2014-08-19T16:51:48.274,ns_1@10.242.238.90:<0.29931.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.274,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29930.0> (ok) [rebalance:debug,2014-08-19T16:51:48.274,ns_1@10.242.238.90:<0.29490.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:48.275,ns_1@10.242.238.90:<0.29490.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.275,ns_1@10.242.238.90:<0.29932.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.275,ns_1@10.242.238.90:<0.29932.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:48.275,ns_1@10.242.238.90:<0.29490.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:48.295,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:51:48.297,ns_1@10.242.238.90:<0.29933.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 590 [ns_server:debug,2014-08-19T16:51:48.298,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.299,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3507 us [ns_server:debug,2014-08-19T16:51:48.299,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.301,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{837, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:48.305,ns_1@10.242.238.90:<0.29867.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.305,ns_1@10.242.238.90:<0.29864.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.305,ns_1@10.242.238.90:<0.29870.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.305,ns_1@10.242.238.90:<0.29871.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:48.305,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29867.0> (ok) [rebalance:debug,2014-08-19T16:51:48.305,ns_1@10.242.238.90:<0.29882.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.305,ns_1@10.242.238.90:<0.29894.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.305,ns_1@10.242.238.90:<0.29885.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.305,ns_1@10.242.238.90:<0.29888.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.305,ns_1@10.242.238.90:<0.29898.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.305,ns_1@10.242.238.90:<0.29904.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:48.305,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29864.0> (ok) [ns_server:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29870.0> (ok) [rebalance:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:<0.29899.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:<0.29891.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29871.0> (ok) [rebalance:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:<0.29910.0>:janitor_agent:handle_call:795]Done 
[rebalance:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:<0.29912.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29882.0> (ok) [rebalance:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:<0.29923.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:<0.29916.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29894.0> (ok) [rebalance:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:<0.29924.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:<0.29919.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:<0.29933.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29885.0> (ok) [ns_server:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29888.0> (ok) [rebalance:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:<0.29905.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29904.0> (ok) [ns_server:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29898.0> (ok) [ns_server:debug,2014-08-19T16:51:48.306,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29891.0> (ok) [ns_server:debug,2014-08-19T16:51:48.307,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29910.0> (ok) [ns_server:debug,2014-08-19T16:51:48.307,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29912.0> (ok) [ns_server:debug,2014-08-19T16:51:48.307,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29899.0> (ok) [ns_server:debug,2014-08-19T16:51:48.307,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29916.0> (ok) [ns_server:debug,2014-08-19T16:51:48.307,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29923.0> (ok) [ns_server:debug,2014-08-19T16:51:48.307,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29924.0> (ok) [ns_server:debug,2014-08-19T16:51:48.307,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29919.0> (ok) [ns_server:debug,2014-08-19T16:51:48.307,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29933.0> (ok) [ns_server:debug,2014-08-19T16:51:48.307,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29905.0> (ok) 
[ns_server:debug,2014-08-19T16:51:48.308,ns_1@10.242.238.90:<0.29938.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 583) [ns_server:debug,2014-08-19T16:51:48.308,ns_1@10.242.238.90:<0.29938.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.308,ns_1@10.242.238.90:<0.29941.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 577) [ns_server:debug,2014-08-19T16:51:48.308,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29937.0> (ok) [ns_server:debug,2014-08-19T16:51:48.308,ns_1@10.242.238.90:<0.29941.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.308,ns_1@10.242.238.90:<0.29942.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 579) [ns_server:debug,2014-08-19T16:51:48.308,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29939.0> (ok) [ns_server:debug,2014-08-19T16:51:48.308,ns_1@10.242.238.90:<0.29942.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.309,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29940.0> (ok) [ns_server:debug,2014-08-19T16:51:48.309,ns_1@10.242.238.90:<0.29945.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 592) [ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:<0.29945.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:<0.29946.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 587) [ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29943.0> (ok) [ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:<0.29946.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:<0.29956.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 593) [ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:<0.29956.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29944.0> (ok) [ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:<0.29962.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 590) [ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:<0.29962.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29949.0> (ok) [ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:<0.29964.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 589) [ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29953.0> (ok) 
[ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:<0.29964.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.310,ns_1@10.242.238.90:<0.29965.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 576) [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29954.0> (ok) [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:<0.29965.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:<0.29966.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 584) [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29955.0> (ok) [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:<0.29966.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:<0.29967.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 580) [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29947.0> (ok) [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:<0.29967.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:<0.29968.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 585) [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29948.0> (ok) [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:<0.29968.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:<0.29969.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 582) [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29950.0> (ok) [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:<0.29969.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29951.0> (ok) [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:<0.29970.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 581) [ns_server:debug,2014-08-19T16:51:48.311,ns_1@10.242.238.90:<0.29970.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29971.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 588) [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29952.0> (ok) [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29971.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29972.0>:capi_set_view_manager:do_wait_index_updated:618]References to 
wait: [] ("default", 586) [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29470.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29963.0> (ok) [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29649.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29510.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29597.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29972.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29973.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 596) [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29643.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29464.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29470.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29516.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29974.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29545.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29675.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29591.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29695.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29961.0> (ok) [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29571.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29617.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29973.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29597.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29975.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29649.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29510.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque 
message ack [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29464.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29516.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29976.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 595) [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29643.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29974.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29977.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29675.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29539.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29980.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29982.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29979.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29981.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29623.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29978.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29571.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29960.0> (ok) [rebalance:info,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29470.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
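From this point the replica-building ebucketmigrator_srv processes are being torn down, and each one walks the same sequence: "Dying with reason: shutdown", a helper process confirms the downstream connection ("Sending opaque message to confirm downstream reception"), the dying process logs "Going to wait for reception of opaque message ack", and finally "Got close ack!", as <0.29470.0> just did above; the same exchange repeats below for the remaining migrators. A speculative per-pid tracker for that progression; the stage labels are invented for this sketch and the regex only covers the message shapes visible here:

import re

# Stage labels are invented for this sketch; the message prefixes are copied
# verbatim from the dying processes' entries in this section.
STAGES = [
    ("Dying with reason: shutdown", "terminating"),
    ("Going to wait for reception of opaque message ack", "waiting_for_ack"),
    ("Got close ack!", "closed"),
]

PID_MSG_RE = re.compile(
    r"(?P<pid><[\d.]+>):ebucketmigrator_srv:[a-z_]+:\d+\](?P<msg>[^\[]*)"
)

def migrator_progress(text):
    """Return {pid: last shutdown stage seen} for ebucketmigrator_srv processes."""
    progress = {}
    for m in PID_MSG_RE.finditer(text):
        for prefix, stage in STAGES:
            if m["msg"].lstrip().startswith(prefix):
                progress[m["pid"]] = stage
    return progress

sample = (
    "[rebalance:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29470.0>:"
    "ebucketmigrator_srv:terminate:737]Dying with reason: shutdown "
    "[ns_server:debug,2014-08-19T16:51:48.312,ns_1@10.242.238.90:<0.29470.0>:"
    "ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception "
    "of opaque message ack "
    "[rebalance:info,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29470.0>:"
    "ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack!"
)

print(migrator_progress(sample))   # expected: {'<0.29470.0>': 'closed'}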
[ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29591.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29695.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29975.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29983.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29545.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29984.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29985.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29986.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29976.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29617.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29987.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29977.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29988.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 578) [rebalance:info,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29597.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29980.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29982.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29979.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29539.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29989.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29981.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29978.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29649.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:info,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29464.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29623.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29565.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29990.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29675.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29516.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29983.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29984.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29643.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29571.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29510.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29985.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29986.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29987.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.313,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29959.0> (ok) [rebalance:info,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29695.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29545.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:48.313,ns_1@10.242.238.90:<0.29591.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:48.314,ns_1@10.242.238.90:<0.29617.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:48.314,ns_1@10.242.238.90:<0.29988.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:51:48.314,ns_1@10.242.238.90:<0.29430.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:48.314,ns_1@10.242.238.90:<0.29991.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 594) [ns_server:debug,2014-08-19T16:51:48.314,ns_1@10.242.238.90:<0.29989.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:48.314,ns_1@10.242.238.90:<0.29539.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:48.314,ns_1@10.242.238.90:<0.29565.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.314,ns_1@10.242.238.90:<0.29990.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.314,ns_1@10.242.238.90:<0.29992.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:48.314,ns_1@10.242.238.90:<0.29623.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:48.315,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29958.0> (ok) [ns_server:debug,2014-08-19T16:51:48.315,ns_1@10.242.238.90:<0.29991.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:48.315,ns_1@10.242.238.90:<0.29430.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.315,ns_1@10.242.238.90:<0.29993.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.315,ns_1@10.242.238.90:<0.29992.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.315,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.29957.0> (ok) [ns_server:debug,2014-08-19T16:51:48.315,ns_1@10.242.238.90:<0.29993.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:51:48.315,ns_1@10.242.238.90:<0.29438.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:48.315,ns_1@10.242.238.90:<0.29565.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:48.315,ns_1@10.242.238.90:<0.29430.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29438.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29669.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29994.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29994.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29669.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29995.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29438.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
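A cruder cross-check on the same shutdown exchange is to count the five message texts and confirm the totals balance (one "Got close ack!" per "Dying with reason: shutdown") once the section has been read to its end. Note that any such search has to use the log's own spelling, "succesfully", since that is the literal string these entries contain; the helper below is an illustrative convenience, not an ns_server tool:

import re
from collections import Counter

# Literal message texts copied from the entries above. Note the log's own
# spelling "succesfully": grepping for "successfully" here finds nothing.
MESSAGES = [
    "Dying with reason: shutdown",
    "Sending opaque message to confirm downstream reception",
    "Going to wait for reception of opaque message ack",
    "Opaque message was succesfully sent",
    "Got close ack!",
]

def handshake_counts(text):
    """Count each shutdown-handshake message in a chunk of log text."""
    return Counter({msg: len(re.findall(re.escape(msg), text)) for msg in MESSAGES})

sample = (
    "...]Dying with reason: shutdown ...]Going to wait for reception of "
    "opaque message ack ...]Opaque message was succesfully sent "
    "...]Got close ack!"
)
print(handshake_counts(sample))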
[rebalance:debug,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29452.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29995.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29669.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29452.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29996.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29996.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:48.316,ns_1@10.242.238.90:<0.29452.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:48.367,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.369,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.369,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1695 us [ns_server:debug,2014-08-19T16:51:48.369,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.370,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{833, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:48.397,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 591 state to active [ns_server:debug,2014-08-19T16:51:48.408,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:51:48.409,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 593 state to active [ns_server:debug,2014-08-19T16:51:48.411,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.411,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3209 us [ns_server:debug,2014-08-19T16:51:48.412,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.412,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{836, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:48.421,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 594 state to active [ns_server:info,2014-08-19T16:51:48.427,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 579 state to active [views:debug,2014-08-19T16:51:48.428,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/591. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:48.428,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",591,active,1} [ns_server:info,2014-08-19T16:51:48.438,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 581 state to active [ns_server:debug,2014-08-19T16:51:48.450,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:51:48.450,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 580 state to active [ns_server:debug,2014-08-19T16:51:48.454,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.454,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3990 us [ns_server:debug,2014-08-19T16:51:48.454,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.455,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{838, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:48.460,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 586 state to active [views:debug,2014-08-19T16:51:48.461,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/593. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:48.461,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",593,active,1} [ns_server:info,2014-08-19T16:51:48.472,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 578 state to active [ns_server:info,2014-08-19T16:51:48.482,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 585 state to active [rebalance:debug,2014-08-19T16:51:48.487,ns_1@10.242.238.90:<0.30011.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 597 [ns_server:debug,2014-08-19T16:51:48.489,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:51:48.492,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 583 state to active [views:debug,2014-08-19T16:51:48.494,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/579. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:48.495,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",579,active,1} [ns_server:debug,2014-08-19T16:51:48.496,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.497,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7041 us [ns_server:debug,2014-08-19T16:51:48.497,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.497,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{832, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:48.502,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 577 state to active [ns_server:info,2014-08-19T16:51:48.511,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 595 state to active [ns_server:info,2014-08-19T16:51:48.520,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 584 state to active [ns_server:info,2014-08-19T16:51:48.530,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 587 state to active [ns_server:debug,2014-08-19T16:51:48.535,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.538,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:51:48.538,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 589 state to active 
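The rebalance traffic is now interleaved with config replication: 'ns_1@10.242.238.88' (presumably the node driving the rebalance) repeatedly requests full synchronization, and this node answers within single-digit milliseconds ("Fully synchronized config in 1695 us", "... in 3209 us", "... in 7041 us", and so on, where "us" is microseconds as printed). A small sketch to pull those durations out and summarize them; the regex and sample text are illustrative:

import re
import statistics

SYNC_RE = re.compile(r"Fully synchronized config in (\d+) us")

def sync_times_us(text):
    """Collect the full-sync durations (microseconds) reported by ns_config_rep."""
    return [int(us) for us in SYNC_RE.findall(text)]

sample = (
    "...]Fully synchronized config in 1695 us "
    "...]Fully synchronized config in 3209 us "
    "...]Fully synchronized config in 7041 us"
)
times = sync_times_us(sample)
print(len(times), "syncs; min", min(times),
      "median", statistics.median(times),
      "max", max(times), "microseconds")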
[ns_server:debug,2014-08-19T16:51:48.538,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3610 us [ns_server:debug,2014-08-19T16:51:48.539,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.539,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{851, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:48.545,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/594. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:48.545,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",594,active,1} [ns_server:info,2014-08-19T16:51:48.548,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 588 state to active [ns_server:info,2014-08-19T16:51:48.556,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 592 state to active [ns_server:info,2014-08-19T16:51:48.571,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 596 state to active [ns_server:debug,2014-08-19T16:51:48.576,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.579,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3275 us [ns_server:debug,2014-08-19T16:51:48.579,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.580,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.581,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{845, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:48.581,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 576 state to active [ns_server:info,2014-08-19T16:51:48.585,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 590 state to active [views:debug,2014-08-19T16:51:48.595,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/586. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:48.596,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",586,active,1} [ns_server:info,2014-08-19T16:51:48.600,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 582 state to active [ns_server:debug,2014-08-19T16:51:48.616,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.619,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.620,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3453 us [ns_server:debug,2014-08-19T16:51:48.620,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.620,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{835, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:48.646,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/580. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:48.646,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",580,active,1} [ns_server:debug,2014-08-19T16:51:48.651,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.654,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.655,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3121 us [ns_server:debug,2014-08-19T16:51:48.655,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.656,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{847, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:48.684,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/578. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:48.684,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",578,active,1} [ns_server:debug,2014-08-19T16:51:48.685,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.688,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.688,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2725 us [ns_server:debug,2014-08-19T16:51:48.688,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.689,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{844, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:48.721,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.728,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.728,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7091 us [ns_server:debug,2014-08-19T16:51:48.729,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{839, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:48.729,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:48.751,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/595. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:48.751,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",595,active,1} [ns_server:debug,2014-08-19T16:51:48.757,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.760,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2967 us [ns_server:debug,2014-08-19T16:51:48.760,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.760,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.761,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{848, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:48.793,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.796,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2600 us [ns_server:debug,2014-08-19T16:51:48.796,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.797,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.797,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{850, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:48.818,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/589. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:48.818,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",589,active,1} [ns_server:debug,2014-08-19T16:51:48.829,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.831,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.831,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1450 us [ns_server:debug,2014-08-19T16:51:48.831,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.832,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{834, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:48.863,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.866,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3066 us [ns_server:debug,2014-08-19T16:51:48.866,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.867,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.868,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{849, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:48.885,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/587. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:48.885,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",587,active,1} [ns_server:debug,2014-08-19T16:51:48.902,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.905,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.905,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3005 us [ns_server:debug,2014-08-19T16:51:48.905,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.906,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{842, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:48.935,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/585. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:48.935,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",585,active,1} [ns_server:debug,2014-08-19T16:51:48.940,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.947,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.947,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6699 us [ns_server:debug,2014-08-19T16:51:48.947,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.948,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{841, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:48.982,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.985,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
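Each of the surrounding "config change: buckets" dumps carries a one-vbucket map diff such as {map,[{842, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}: vbucket 842's chain moving from [ns_1@10.242.238.88, undefined] to [ns_1@10.242.238.91, ns_1@10.242.238.88]. A narrow sketch for extracting those triples; the regex below only handles the shapes seen in this log and is not a general Erlang term parser:

import re

# Matches the single-vbucket map diffs printed in the "config change: buckets"
# entries above; narrow by design.
MAP_RE = re.compile(
    r"\{map,\[\{(?P<vb>\d+),\s*"
    r"\[(?P<old>[^\]]*)\],\s*"
    r"\[(?P<new>[^\]]*)\]\}\]\}"
)

def chain(raw):
    """Split a chain like "'ns_1@10.242.238.88',undefined" into a Python list."""
    return [item.strip().strip("'") for item in raw.split(",")]

def map_changes(text):
    """Return (vbucket, old_chain, new_chain) for every map diff in the text."""
    return [(int(m["vb"]), chain(m["old"]), chain(m["new"]))
            for m in MAP_RE.finditer(text)]

sample = ("{map,[{842, ['ns_1@10.242.238.88',undefined], "
          "['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}")
print(map_changes(sample))
# expected: [(842, ['ns_1@10.242.238.88', 'undefined'],
#             ['ns_1@10.242.238.91', 'ns_1@10.242.238.88'])]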
[ns_server:debug,2014-08-19T16:51:48.986,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3450 us [ns_server:debug,2014-08-19T16:51:48.987,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:48.987,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{840, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:48.997,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:48.999,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2674 us [ns_server:debug,2014-08-19T16:51:48.999,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.000,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.001,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{591, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:49.002,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/583. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:49.002,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",583,active,1} [ns_server:debug,2014-08-19T16:51:49.033,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.034,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.034,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 903 us [ns_server:debug,2014-08-19T16:51:49.035,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.035,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{593, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:49.052,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/581. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:49.052,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",581,active,1} [ns_server:debug,2014-08-19T16:51:49.074,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.076,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1444 us [ns_server:debug,2014-08-19T16:51:49.076,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.076,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.077,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{594, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.108,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.111,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
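The remaining entries pair three views of the same takeover: ns_memcached's "Changed vbucket N state to active", mc_couch_events' "Got set_vbucket event for default/N. Updated state: active (1)", and mc_connection's "Signaled mc_couch_event: {set_vbucket,...}". In this stretch the view-manager event trails the memcached state change by tens to hundreds of milliseconds (vbucket 591 flips at 16:51:48.397 and its event lands at 16:51:48.428). A hedged sketch that lists vbuckets whose event has not yet appeared in a given chunk of log; the names and regexes are mine:

import re

CHANGED_RE = re.compile(r"Changed vbucket (\d+) state to active")
EVENT_RE = re.compile(
    r"Got set_vbucket event for default/(\d+)\.\s*Updated state: active"
)

def unacknowledged_vbuckets(text):
    """vbuckets reported active by ns_memcached with no matching
    set_vbucket event from mc_couch_events (yet) in this chunk of log."""
    changed = {int(v) for v in CHANGED_RE.findall(text)}
    seen_events = {int(v) for v in EVENT_RE.findall(text)}
    return sorted(changed - seen_events)

sample = (
    "[ns_server:info,...]Changed vbucket 591 state to active "
    "[views:debug,...]Got set_vbucket event for default/591. "
    "Updated state: active (1) "
    "[ns_server:info,...]Changed vbucket 582 state to active"
)
print(unacknowledged_vbuckets(sample))   # expected: [582]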
[ns_server:debug,2014-08-19T16:51:49.111,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2765 us [ns_server:debug,2014-08-19T16:51:49.111,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.112,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{579, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:49.119,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/577. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:49.119,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",577,active,1} [rebalance:debug,2014-08-19T16:51:49.120,ns_1@10.242.238.90:<0.30011.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:49.120,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30011.0> (ok) [ns_server:debug,2014-08-19T16:51:49.144,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.149,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4932 us [ns_server:debug,2014-08-19T16:51:49.149,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.149,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.150,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{581, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:49.169,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/596. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:49.169,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",596,active,1} [ns_server:debug,2014-08-19T16:51:49.178,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.180,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.180,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1483 us [ns_server:debug,2014-08-19T16:51:49.181,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{580, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.182,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.217,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.220,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.220,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2739 us [ns_server:debug,2014-08-19T16:51:49.220,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:49.220,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/592. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:49.221,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",592,active,1} [ns_server:debug,2014-08-19T16:51:49.221,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{586, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:49.270,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/590. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:49.270,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",590,active,1} [ns_server:debug,2014-08-19T16:51:49.287,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.291,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.291,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3758 us [ns_server:debug,2014-08-19T16:51:49.292,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.292,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{578, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:49.320,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/588. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:49.320,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",588,active,1} [ns_server:debug,2014-08-19T16:51:49.355,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.359,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.359,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3376 us [ns_server:debug,2014-08-19T16:51:49.359,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.360,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{585, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:49.370,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/584. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:49.371,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",584,active,1} [ns_server:debug,2014-08-19T16:51:49.389,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.392,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.393,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3001 us [ns_server:debug,2014-08-19T16:51:49.393,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.393,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{583, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.425,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:51:49.432,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/582. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:49.432,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",582,active,1} [ns_server:debug,2014-08-19T16:51:49.433,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7480 us [ns_server:debug,2014-08-19T16:51:49.433,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.433,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{577, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.434,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.465,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.468,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.468,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3260 us [ns_server:debug,2014-08-19T16:51:49.469,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{595, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.469,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:49.479,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/576. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:49.479,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",576,active,1} [ns_server:debug,2014-08-19T16:51:49.499,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.502,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3517 us [ns_server:debug,2014-08-19T16:51:49.503,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.503,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.503,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{584, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.536,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.539,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.540,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3244 us [ns_server:debug,2014-08-19T16:51:49.540,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.540,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{587, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.571,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.574,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.574,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3448 us [ns_server:debug,2014-08-19T16:51:49.574,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:49.575,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{589, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.603,ns_1@10.242.238.90:<0.30050.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 597) [ns_server:debug,2014-08-19T16:51:49.603,ns_1@10.242.238.90:<0.30050.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:49.603,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30049.0> (ok) [rebalance:debug,2014-08-19T16:51:49.604,ns_1@10.242.238.90:<0.29396.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:49.604,ns_1@10.242.238.90:<0.29396.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:49.604,ns_1@10.242.238.90:<0.30051.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:49.605,ns_1@10.242.238.90:<0.30051.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was successfully sent [rebalance:info,2014-08-19T16:51:49.605,ns_1@10.242.238.90:<0.29396.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:49.605,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.609,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.609,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3241 us [ns_server:debug,2014-08-19T16:51:49.609,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.610,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{588, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.665,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:51:49.667,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 597 state to active [ns_server:debug,2014-08-19T16:51:49.673,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7976 us [ns_server:debug,2014-08-19T16:51:49.673,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.674,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.674,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{592, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.702,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.705,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.705,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3046 us [ns_server:debug,2014-08-19T16:51:49.705,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.706,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> 
[{configs,[{"default", [{map,[{596, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:49.730,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/597. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:49.730,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",597,active,1} [ns_server:debug,2014-08-19T16:51:49.743,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.746,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.747,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3140 us [ns_server:debug,2014-08-19T16:51:49.747,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.747,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{576, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.778,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.781,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.782,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3299 us [ns_server:debug,2014-08-19T16:51:49.782,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.783,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{590, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
[ns_server:debug,2014-08-19T16:51:49.827,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.831,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.831,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4613 us [ns_server:debug,2014-08-19T16:51:49.832,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.832,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{582, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.847,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.848,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.848,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1146 us [ns_server:debug,2014-08-19T16:51:49.848,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.849,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{332, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.883,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.888,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.888,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5613 us [ns_server:debug,2014-08-19T16:51:49.889,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.889,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{334, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, 
{uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.920,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.925,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.926,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{336, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.932,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 42 us [ns_server:debug,2014-08-19T16:51:49.932,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.962,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:49.965,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3122 us [ns_server:debug,2014-08-19T16:51:49.965,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.966,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:49.966,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{338, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:49.999,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.002,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.003,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3540 us 
[ns_server:debug,2014-08-19T16:51:50.004,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.004,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{597, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.005,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:debug,2014-08-19T16:51:50.006,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:compact_next_bucket:1453]Going to spawn bucket compaction with forced view compaction for bucket default [ns_server:debug,2014-08-19T16:51:50.006,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:compact_next_bucket:1482]Spawned 'uninhibited' compaction for default [ns_server:info,2014-08-19T16:51:50.007,ns_1@10.242.238.90:<0.30063.0>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning up indexes for bucket `default` [ns_server:info,2014-08-19T16:51:50.009,ns_1@10.242.238.90:<0.30063.0>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [forced_previously_inhibited_view_compaction, {database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:51:50.013,ns_1@10.242.238.90:<0.30066.0>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 50232, disk size is 7564648 [ns_server:debug,2014-08-19T16:51:50.013,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:51:50.013,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:51:50.029,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.032,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2822 us [ns_server:debug,2014-08-19T16:51:50.033,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.033,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.034,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{340, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.064,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.067,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.067,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2630 us [ns_server:debug,2014-08-19T16:51:50.068,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.068,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{333, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.100,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.107,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6694 us [ns_server:debug,2014-08-19T16:51:50.107,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.108,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.108,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{339, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, 
{fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.140,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.141,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.141,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1225 us [ns_server:debug,2014-08-19T16:51:50.142,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.143,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{322, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.172,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.175,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2638 us [ns_server:debug,2014-08-19T16:51:50.175,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.175,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.176,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{320, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.210,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.213,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2814 us [ns_server:debug,2014-08-19T16:51:50.213,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:50.214,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.214,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{329, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.246,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.249,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2774 us [ns_server:debug,2014-08-19T16:51:50.250,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.251,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.251,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{327, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.284,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.284,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.284,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 19 us [ns_server:debug,2014-08-19T16:51:50.284,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.285,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{323, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.317,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization 
request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.325,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7718 us [ns_server:debug,2014-08-19T16:51:50.325,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.326,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.327,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{330, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.354,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.358,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3773 us [ns_server:debug,2014-08-19T16:51:50.358,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.358,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.359,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{321, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.389,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.393,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3634 us [ns_server:debug,2014-08-19T16:51:50.393,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.393,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.394,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{328, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, 
{auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.425,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.428,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2923 us [ns_server:debug,2014-08-19T16:51:50.428,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.429,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.429,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{325, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.461,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.465,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4076 us [ns_server:debug,2014-08-19T16:51:50.466,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.466,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.467,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{324, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.495,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.498,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.498,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2830 us [ns_server:debug,2014-08-19T16:51:50.498,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:50.499,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{326, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.530,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.537,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7714 us [ns_server:debug,2014-08-19T16:51:50.538,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.538,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.539,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{341, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.564,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.567,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.567,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2980 us [ns_server:debug,2014-08-19T16:51:50.567,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.568,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{335, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.600,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:51:50.603,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.603,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2960 us [ns_server:debug,2014-08-19T16:51:50.604,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.604,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{337, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:50.634,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:50.637,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2881 us [ns_server:debug,2014-08-19T16:51:50.637,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.637,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:50.639,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{331, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:50.690,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 575 state to replica [ns_server:info,2014-08-19T16:51:50.694,ns_1@10.242.238.90:<0.30083.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 575 to state replica [ns_server:debug,2014-08-19T16:51:50.719,ns_1@10.242.238.90:<0.30083.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_575_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:50.720,ns_1@10.242.238.90:<0.30083.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[575]}, {checkpoints,[{575,0}]}, {name,<<"replication_building_575_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[575]}, {takeover,false}, {suffix,"building_575_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",575,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} 
[rebalance:debug,2014-08-19T16:51:50.721,ns_1@10.242.238.90:<0.30083.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30084.0> [rebalance:debug,2014-08-19T16:51:50.721,ns_1@10.242.238.90:<0.30083.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:50.721,ns_1@10.242.238.90:<0.30083.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.28816.1>,#Ref<16550.0.2.76805>}]} [rebalance:info,2014-08-19T16:51:50.722,ns_1@10.242.238.90:<0.30083.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 575 [rebalance:debug,2014-08-19T16:51:50.722,ns_1@10.242.238.90:<0.30083.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.28816.1>,#Ref<16550.0.2.76805>}] [ns_server:debug,2014-08-19T16:51:50.723,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30085.0> (ok) [ns_server:debug,2014-08-19T16:51:50.723,ns_1@10.242.238.90:<0.30083.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:50.724,ns_1@10.242.238.90:<0.30086.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 575 [ns_server:info,2014-08-19T16:51:50.797,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 574 state to replica [ns_server:info,2014-08-19T16:51:50.802,ns_1@10.242.238.90:<0.30103.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 574 to state replica [ns_server:debug,2014-08-19T16:51:50.820,ns_1@10.242.238.90:<0.30103.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_574_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:50.821,ns_1@10.242.238.90:<0.30103.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[574]}, {checkpoints,[{574,0}]}, {name,<<"replication_building_574_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[574]}, {takeover,false}, {suffix,"building_574_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",574,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:50.822,ns_1@10.242.238.90:<0.30103.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30104.0> [ns_server:debug,2014-08-19T16:51:50.822,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 575. Nacking mccouch update. [rebalance:debug,2014-08-19T16:51:50.822,ns_1@10.242.238.90:<0.30103.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [views:debug,2014-08-19T16:51:50.822,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/575. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:50.822,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",575,replica,0} [rebalance:debug,2014-08-19T16:51:50.823,ns_1@10.242.238.90:<0.30103.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.28867.1>,#Ref<16550.0.2.77067>}]} [rebalance:info,2014-08-19T16:51:50.823,ns_1@10.242.238.90:<0.30103.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 574 [rebalance:debug,2014-08-19T16:51:50.823,ns_1@10.242.238.90:<0.30103.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.28867.1>,#Ref<16550.0.2.77067>}] [ns_server:debug,2014-08-19T16:51:50.823,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,723, 595,412,957,646,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607, 424,360,969,722,658,594,411,347,956,709,645,581,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955, 708,644,580,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733, 669,605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694, 630,383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408, 344,953,706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369, 978,731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939, 756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653, 589,406,342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742,678, 614,367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392, 1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962, 715,651,587,404,949,766,702,638,391,1013,753,689,625,378,1000,987,740,676, 612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,390,1012, 999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649, 585,402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972, 725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,388,1010,997, 686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634,996,685,374,736, 608,425,970,659,348,710,582,399,1021] [ns_server:debug,2014-08-19T16:51:50.824,ns_1@10.242.238.90:<0.30103.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:50.825,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30105.0> (ok) [rebalance:debug,2014-08-19T16:51:50.826,ns_1@10.242.238.90:<0.30106.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 574 [views:debug,2014-08-19T16:51:50.873,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/575. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:50.873,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",575,replica,0} [ns_server:info,2014-08-19T16:51:50.893,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 573 state to replica [ns_server:info,2014-08-19T16:51:50.898,ns_1@10.242.238.90:<0.30109.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 573 to state replica [ns_server:debug,2014-08-19T16:51:50.916,ns_1@10.242.238.90:<0.30109.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_573_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:50.918,ns_1@10.242.238.90:<0.30109.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[573]}, {checkpoints,[{573,0}]}, {name,<<"replication_building_573_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[573]}, {takeover,false}, {suffix,"building_573_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",573,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:50.918,ns_1@10.242.238.90:<0.30109.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30110.0> [rebalance:debug,2014-08-19T16:51:50.918,ns_1@10.242.238.90:<0.30109.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:50.919,ns_1@10.242.238.90:<0.30109.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.28918.1>,#Ref<16550.0.2.77354>}]} [rebalance:info,2014-08-19T16:51:50.919,ns_1@10.242.238.90:<0.30109.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 573 [rebalance:debug,2014-08-19T16:51:50.919,ns_1@10.242.238.90:<0.30109.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.28918.1>,#Ref<16550.0.2.77354>}] [ns_server:debug,2014-08-19T16:51:50.920,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30111.0> (ok) [ns_server:debug,2014-08-19T16:51:50.920,ns_1@10.242.238.90:<0.30109.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:50.921,ns_1@10.242.238.90:<0.30112.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 573 [ns_server:debug,2014-08-19T16:51:50.965,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 574. Nacking mccouch update. [views:debug,2014-08-19T16:51:50.965,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/574. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:50.965,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",574,pending,0} [ns_server:debug,2014-08-19T16:51:50.966,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,723, 595,412,957,646,944,761,697,633,386,1008,995,748,684,620,373,982,735,671,607, 424,360,969,722,658,594,411,347,956,709,645,581,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955, 708,644,580,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733, 669,605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694, 630,383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408, 344,953,706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369, 978,731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939, 756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653, 589,406,342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742,678, 614,367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392, 1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962, 715,651,587,404,949,766,702,638,574,391,1013,753,689,625,378,1000,987,740, 676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,390, 1012,999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713, 649,585,402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363, 972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,388,1010, 997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634,996,685,374, 736,608,425,970,659,348,710,582,399,1021] [ns_server:info,2014-08-19T16:51:50.988,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 572 state to replica [ns_server:info,2014-08-19T16:51:50.993,ns_1@10.242.238.90:<0.30129.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 572 to state replica [views:debug,2014-08-19T16:51:50.998,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/574. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:50.998,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",574,pending,0} [rebalance:debug,2014-08-19T16:51:50.999,ns_1@10.242.238.90:<0.30106.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:50.999,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30106.0> (ok) [rebalance:debug,2014-08-19T16:51:51.002,ns_1@10.242.238.90:<0.30130.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 574 [ns_server:debug,2014-08-19T16:51:51.011,ns_1@10.242.238.90:<0.30129.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_572_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:51.013,ns_1@10.242.238.90:<0.30129.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[572]}, {checkpoints,[{572,0}]}, {name,<<"replication_building_572_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[572]}, {takeover,false}, {suffix,"building_572_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",572,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:51.014,ns_1@10.242.238.90:<0.30129.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30133.0> [rebalance:debug,2014-08-19T16:51:51.014,ns_1@10.242.238.90:<0.30129.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:51.014,ns_1@10.242.238.90:<0.30129.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.28969.1>,#Ref<16550.0.2.77639>}]} [rebalance:info,2014-08-19T16:51:51.014,ns_1@10.242.238.90:<0.30129.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 572 [rebalance:debug,2014-08-19T16:51:51.015,ns_1@10.242.238.90:<0.30129.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.28969.1>,#Ref<16550.0.2.77639>}] [ns_server:debug,2014-08-19T16:51:51.015,ns_1@10.242.238.90:<0.30129.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:51.015,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30134.0> (ok) [rebalance:debug,2014-08-19T16:51:51.017,ns_1@10.242.238.90:<0.30135.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 572 [views:debug,2014-08-19T16:51:51.057,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/575. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:51.058,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",575,pending,0} [ns_server:info,2014-08-19T16:51:51.087,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 571 state to replica [ns_server:info,2014-08-19T16:51:51.092,ns_1@10.242.238.90:<0.30138.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 571 to state replica [ns_server:debug,2014-08-19T16:51:51.110,ns_1@10.242.238.90:<0.30138.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_571_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:51.112,ns_1@10.242.238.90:<0.30138.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[571]}, {checkpoints,[{571,0}]}, {name,<<"replication_building_571_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[571]}, {takeover,false}, {suffix,"building_571_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",571,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:51.112,ns_1@10.242.238.90:<0.30138.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30139.0> [rebalance:debug,2014-08-19T16:51:51.113,ns_1@10.242.238.90:<0.30138.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:51.113,ns_1@10.242.238.90:<0.30138.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29031.1>,#Ref<16550.0.2.77992>}]} [rebalance:info,2014-08-19T16:51:51.113,ns_1@10.242.238.90:<0.30138.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 571 [rebalance:debug,2014-08-19T16:51:51.114,ns_1@10.242.238.90:<0.30138.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29031.1>,#Ref<16550.0.2.77992>}] [ns_server:debug,2014-08-19T16:51:51.114,ns_1@10.242.238.90:<0.30138.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:51.115,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30140.0> (ok) [rebalance:debug,2014-08-19T16:51:51.116,ns_1@10.242.238.90:<0.30141.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 571 [ns_server:debug,2014-08-19T16:51:51.174,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 573. Nacking mccouch update. [views:debug,2014-08-19T16:51:51.174,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/573. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:51.175,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",573,pending,0} [ns_server:debug,2014-08-19T16:51:51.175,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,723, 595,412,957,646,697,386,1008,995,748,684,620,373,982,735,671,607,424,360,969, 722,658,594,411,347,956,709,645,581,398,1020,943,760,696,632,385,1007,994, 747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955,708,644, 580,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,605, 422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630,383, 1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408,344,953, 706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369,978,731, 667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692, 628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589,406, 342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742,678,614,367, 976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754, 690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,587, 404,949,766,702,638,574,391,1013,753,689,625,378,1000,987,740,676,612,365, 974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390,1012,999, 752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,585, 402,947,764,700,636,389,1011,998,751,687,623,376,985,738,674,610,363,972,725, 661,597,414,350,959,712,648,584,401,1023,946,763,699,635,388,1010,997,686, 375,737,609,426,971,660,349,711,583,400,1022,945,762,634,996,685,374,736,608, 425,970,659,348,710,582,399,1021,944,761,633] [ns_server:info,2014-08-19T16:51:51.183,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 570 state to replica [ns_server:info,2014-08-19T16:51:51.187,ns_1@10.242.238.90:<0.30158.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 570 to state replica [ns_server:debug,2014-08-19T16:51:51.207,ns_1@10.242.238.90:<0.30158.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_570_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:51.208,ns_1@10.242.238.90:<0.30158.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[570]}, {checkpoints,[{570,0}]}, {name,<<"replication_building_570_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[570]}, {takeover,false}, {suffix,"building_570_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",570,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:51.209,ns_1@10.242.238.90:<0.30158.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30159.0> [rebalance:debug,2014-08-19T16:51:51.209,ns_1@10.242.238.90:<0.30158.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:51.209,ns_1@10.242.238.90:<0.30158.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29082.1>,#Ref<16550.0.2.78276>}]} [rebalance:info,2014-08-19T16:51:51.210,ns_1@10.242.238.90:<0.30158.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 570 
[rebalance:debug,2014-08-19T16:51:51.210,ns_1@10.242.238.90:<0.30158.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29082.1>,#Ref<16550.0.2.78276>}] [ns_server:debug,2014-08-19T16:51:51.211,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30160.0> (ok) [ns_server:debug,2014-08-19T16:51:51.211,ns_1@10.242.238.90:<0.30158.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:51.212,ns_1@10.242.238.90:<0.30161.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 570 [views:debug,2014-08-19T16:51:51.241,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/573. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:51.242,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",573,pending,0} [ns_server:info,2014-08-19T16:51:51.278,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 569 state to replica [ns_server:info,2014-08-19T16:51:51.282,ns_1@10.242.238.90:<0.30164.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 569 to state replica [ns_server:debug,2014-08-19T16:51:51.300,ns_1@10.242.238.90:<0.30164.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_569_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:51.302,ns_1@10.242.238.90:<0.30164.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[569]}, {checkpoints,[{569,0}]}, {name,<<"replication_building_569_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[569]}, {takeover,false}, {suffix,"building_569_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",569,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:51.303,ns_1@10.242.238.90:<0.30164.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30165.0> [rebalance:debug,2014-08-19T16:51:51.303,ns_1@10.242.238.90:<0.30164.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:51.303,ns_1@10.242.238.90:<0.30164.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29133.1>,#Ref<16550.0.2.78538>}]} [rebalance:info,2014-08-19T16:51:51.303,ns_1@10.242.238.90:<0.30164.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 569 [rebalance:debug,2014-08-19T16:51:51.304,ns_1@10.242.238.90:<0.30164.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29133.1>,#Ref<16550.0.2.78538>}] [ns_server:debug,2014-08-19T16:51:51.304,ns_1@10.242.238.90:<0.30164.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:51.304,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30166.0> (ok) [rebalance:debug,2014-08-19T16:51:51.306,ns_1@10.242.238.90:<0.30167.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 569 [ns_server:debug,2014-08-19T16:51:51.370,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 572. Nacking mccouch update. 
[views:debug,2014-08-19T16:51:51.370,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/572. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:51.371,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",572,pending,0} [ns_server:debug,2014-08-19T16:51:51.371,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,387,1009,749,621,983,672,361,723, 595,412,957,646,697,386,1008,995,748,684,620,373,982,735,671,607,424,360,969, 722,658,594,411,347,956,709,645,581,398,1020,943,760,696,632,385,1007,994, 747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955,708,644, 580,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669,605, 422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630,383, 1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408,344,953, 706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369,978,731, 667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692, 628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589,406, 342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742,678,614,367, 976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754, 690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,587, 404,949,766,702,638,574,391,1013,753,689,625,378,1000,987,740,676,612,365, 974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390,1012,999, 752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,585, 402,947,764,700,636,572,389,1011,998,751,687,623,376,985,738,674,610,363,972, 725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,388,1010,997, 686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634,996,685,374,736, 608,425,970,659,348,710,582,399,1021,944,761,633] [ns_server:info,2014-08-19T16:51:51.373,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 568 state to replica [ns_server:info,2014-08-19T16:51:51.377,ns_1@10.242.238.90:<0.30190.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 568 to state replica [ns_server:debug,2014-08-19T16:51:51.396,ns_1@10.242.238.90:<0.30190.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_568_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:51.397,ns_1@10.242.238.90:<0.30190.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[568]}, {checkpoints,[{568,0}]}, {name,<<"replication_building_568_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[568]}, {takeover,false}, {suffix,"building_568_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",568,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:51.398,ns_1@10.242.238.90:<0.30190.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30191.0> [rebalance:debug,2014-08-19T16:51:51.398,ns_1@10.242.238.90:<0.30190.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:51.399,ns_1@10.242.238.90:<0.30190.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter 
{had_backfill,undefined,undefined,[{<16550.29194.1>,#Ref<16550.0.2.78829>}]} [rebalance:info,2014-08-19T16:51:51.399,ns_1@10.242.238.90:<0.30190.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 568 [rebalance:debug,2014-08-19T16:51:51.399,ns_1@10.242.238.90:<0.30190.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29194.1>,#Ref<16550.0.2.78829>}] [ns_server:debug,2014-08-19T16:51:51.400,ns_1@10.242.238.90:<0.30190.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:51.400,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30192.0> (ok) [rebalance:debug,2014-08-19T16:51:51.401,ns_1@10.242.238.90:<0.30193.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 568 [views:debug,2014-08-19T16:51:51.438,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/572. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:51.438,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",572,pending,0} [ns_server:info,2014-08-19T16:51:51.467,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 567 state to replica [ns_server:info,2014-08-19T16:51:51.472,ns_1@10.242.238.90:<0.30197.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 567 to state replica [ns_server:debug,2014-08-19T16:51:51.495,ns_1@10.242.238.90:<0.30197.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_567_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:51.496,ns_1@10.242.238.90:<0.30197.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[567]}, {checkpoints,[{567,0}]}, {name,<<"replication_building_567_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[567]}, {takeover,false}, {suffix,"building_567_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",567,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:51.497,ns_1@10.242.238.90:<0.30197.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30212.0> [rebalance:debug,2014-08-19T16:51:51.497,ns_1@10.242.238.90:<0.30197.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:51.498,ns_1@10.242.238.90:<0.30197.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29245.1>,#Ref<16550.0.2.79092>}]} [rebalance:info,2014-08-19T16:51:51.498,ns_1@10.242.238.90:<0.30197.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 567 [rebalance:debug,2014-08-19T16:51:51.498,ns_1@10.242.238.90:<0.30197.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29245.1>,#Ref<16550.0.2.79092>}] [ns_server:debug,2014-08-19T16:51:51.499,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30213.0> (ok) [ns_server:debug,2014-08-19T16:51:51.499,ns_1@10.242.238.90:<0.30197.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:51.500,ns_1@10.242.238.90:<0.30214.0>:janitor_agent:handle_call:793]Going to wait for persistence 
of checkpoint 1 in vbucket 567 [ns_server:debug,2014-08-19T16:51:51.563,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 570. Nacking mccouch update. [views:debug,2014-08-19T16:51:51.563,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/570. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:51.563,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",570,pending,0} [ns_server:debug,2014-08-19T16:51:51.564,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,386,1008,995,748,684,620,373,982,735,671,607,424,360, 969,722,658,594,411,347,956,709,645,581,398,1020,943,760,696,632,385,1007, 994,747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955,708, 644,580,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669, 605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630, 383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408,344, 953,706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369,978, 731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756, 692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589, 406,342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742,678,614, 367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014, 754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,574,391,1013,753,689,625,378,1000,987,740,676,612, 365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390,1012, 999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649, 585,402,947,764,700,636,572,389,1011,998,751,687,623,376,985,738,674,610,363, 972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,388,1010, 997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634,996,685,374, 736,608,425,970,659,348,710,582,399,1021,944,761,633] [ns_server:info,2014-08-19T16:51:51.567,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 566 state to replica [ns_server:info,2014-08-19T16:51:51.571,ns_1@10.242.238.90:<0.30217.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 566 to state replica [ns_server:debug,2014-08-19T16:51:51.591,ns_1@10.242.238.90:<0.30217.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_566_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:51.593,ns_1@10.242.238.90:<0.30217.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[566]}, {checkpoints,[{566,0}]}, {name,<<"replication_building_566_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[566]}, {takeover,false}, {suffix,"building_566_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",566,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:51.593,ns_1@10.242.238.90:<0.30217.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30218.0> [rebalance:debug,2014-08-19T16:51:51.593,ns_1@10.242.238.90:<0.30217.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling 
note_tap_stats [rebalance:debug,2014-08-19T16:51:51.594,ns_1@10.242.238.90:<0.30217.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29300.1>,#Ref<16550.0.2.79420>}]} [rebalance:info,2014-08-19T16:51:51.594,ns_1@10.242.238.90:<0.30217.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 566 [rebalance:debug,2014-08-19T16:51:51.594,ns_1@10.242.238.90:<0.30217.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29300.1>,#Ref<16550.0.2.79420>}] [ns_server:debug,2014-08-19T16:51:51.595,ns_1@10.242.238.90:<0.30217.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:51.595,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30219.0> (ok) [rebalance:debug,2014-08-19T16:51:51.597,ns_1@10.242.238.90:<0.30220.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 566 [views:debug,2014-08-19T16:51:51.597,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/570. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:51.597,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",570,pending,0} [ns_server:info,2014-08-19T16:51:51.662,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 565 state to replica [ns_server:info,2014-08-19T16:51:51.667,ns_1@10.242.238.90:<0.30237.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 565 to state replica [ns_server:debug,2014-08-19T16:51:51.685,ns_1@10.242.238.90:<0.30237.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_565_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:51.687,ns_1@10.242.238.90:<0.30237.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[565]}, {checkpoints,[{565,0}]}, {name,<<"replication_building_565_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[565]}, {takeover,false}, {suffix,"building_565_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",565,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:51.687,ns_1@10.242.238.90:<0.30237.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30238.0> [rebalance:debug,2014-08-19T16:51:51.687,ns_1@10.242.238.90:<0.30237.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:51.688,ns_1@10.242.238.90:<0.30237.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29351.1>,#Ref<16550.0.2.79684>}]} [rebalance:info,2014-08-19T16:51:51.688,ns_1@10.242.238.90:<0.30237.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 565 [rebalance:debug,2014-08-19T16:51:51.688,ns_1@10.242.238.90:<0.30237.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29351.1>,#Ref<16550.0.2.79684>}] [ns_server:debug,2014-08-19T16:51:51.689,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30239.0> (ok) [ns_server:debug,2014-08-19T16:51:51.689,ns_1@10.242.238.90:<0.30237.0>:ebucketmigrator_srv:process_upstream:1031]seen 
backfill-close message [rebalance:debug,2014-08-19T16:51:51.690,ns_1@10.242.238.90:<0.30240.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 565 [ns_server:debug,2014-08-19T16:51:51.730,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 571. Nacking mccouch update. [views:debug,2014-08-19T16:51:51.730,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/571. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:51.731,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",571,pending,0} [ns_server:debug,2014-08-19T16:51:51.731,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,386,1008,995,748,684,620,373,982,735,671,607,424,360, 969,722,658,594,411,347,956,709,645,581,398,1020,943,760,696,632,385,1007, 994,747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955,708, 644,580,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733,669, 605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630, 383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408,344, 953,706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369,978, 731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756, 692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589, 406,342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742,678,614, 367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014, 754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,574,391,1013,753,689,625,378,1000,987,740,676,612, 365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390,1012, 999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649, 585,402,947,764,700,636,572,389,1011,998,751,687,623,376,985,738,674,610,363, 972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,571,388, 1010,997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634,996, 685,374,736,608,425,970,659,348,710,582,399,1021,944,761,633] [ns_server:info,2014-08-19T16:51:51.757,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 564 state to replica [ns_server:info,2014-08-19T16:51:51.761,ns_1@10.242.238.90:<0.30243.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 564 to state replica [ns_server:debug,2014-08-19T16:51:51.781,ns_1@10.242.238.90:<0.30243.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_564_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:51.783,ns_1@10.242.238.90:<0.30243.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[564]}, {checkpoints,[{564,0}]}, {name,<<"replication_building_564_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[564]}, {takeover,false}, {suffix,"building_564_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",564,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} 
[rebalance:debug,2014-08-19T16:51:51.783,ns_1@10.242.238.90:<0.30243.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30244.0> [rebalance:debug,2014-08-19T16:51:51.783,ns_1@10.242.238.90:<0.30243.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:51.784,ns_1@10.242.238.90:<0.30243.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29402.1>,#Ref<16550.0.2.79949>}]} [rebalance:info,2014-08-19T16:51:51.784,ns_1@10.242.238.90:<0.30243.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 564 [rebalance:debug,2014-08-19T16:51:51.784,ns_1@10.242.238.90:<0.30243.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29402.1>,#Ref<16550.0.2.79949>}] [ns_server:debug,2014-08-19T16:51:51.785,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30245.0> (ok) [ns_server:debug,2014-08-19T16:51:51.785,ns_1@10.242.238.90:<0.30243.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:51.786,ns_1@10.242.238.90:<0.30246.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 564 [views:debug,2014-08-19T16:51:51.791,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/571. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:51.791,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",571,pending,0} [ns_server:info,2014-08-19T16:51:51.854,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 563 state to replica [ns_server:info,2014-08-19T16:51:51.858,ns_1@10.242.238.90:<0.30263.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 563 to state replica [ns_server:debug,2014-08-19T16:51:51.874,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 569. Nacking mccouch update. [views:debug,2014-08-19T16:51:51.874,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/569. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:51.874,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",569,pending,0} [ns_server:debug,2014-08-19T16:51:51.875,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,995,748,684,620,373,982,735,671,607,424, 360,969,722,658,594,411,347,956,709,645,581,398,1020,943,760,696,632,385, 1007,994,747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955, 708,644,580,397,1019,942,759,695,631,384,1006,993,746,682,618,371,980,733, 669,605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694, 630,383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408, 344,953,706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369, 978,731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939, 756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653, 589,406,342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742,678, 614,367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392, 1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962, 715,651,587,404,949,766,702,638,574,391,1013,753,689,625,378,1000,987,740, 676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573, 390,1012,999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960, 713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,376,985,738,674, 610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,571, 388,1010,997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634, 996,685,374,736,608,425,970,659,348,710,582,399,1021,944,761,633] [ns_server:debug,2014-08-19T16:51:51.877,ns_1@10.242.238.90:<0.30263.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_563_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:51.878,ns_1@10.242.238.90:<0.30263.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[563]}, {checkpoints,[{563,0}]}, {name,<<"replication_building_563_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[563]}, {takeover,false}, {suffix,"building_563_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",563,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:51.879,ns_1@10.242.238.90:<0.30263.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30264.0> [rebalance:debug,2014-08-19T16:51:51.879,ns_1@10.242.238.90:<0.30263.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:51.879,ns_1@10.242.238.90:<0.30263.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29453.1>,#Ref<16550.0.2.80210>}]} [rebalance:info,2014-08-19T16:51:51.880,ns_1@10.242.238.90:<0.30263.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 563 [rebalance:debug,2014-08-19T16:51:51.880,ns_1@10.242.238.90:<0.30263.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29453.1>,#Ref<16550.0.2.80210>}] 
[ns_server:debug,2014-08-19T16:51:51.881,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30265.0> (ok) [ns_server:debug,2014-08-19T16:51:51.881,ns_1@10.242.238.90:<0.30263.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:51.882,ns_1@10.242.238.90:<0.30266.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 563 [views:debug,2014-08-19T16:51:51.942,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/569. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:51.942,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",569,pending,0} [ns_server:info,2014-08-19T16:51:51.950,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 562 state to replica [ns_server:info,2014-08-19T16:51:51.955,ns_1@10.242.238.90:<0.30269.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 562 to state replica [ns_server:debug,2014-08-19T16:51:51.973,ns_1@10.242.238.90:<0.30269.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_562_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:51.975,ns_1@10.242.238.90:<0.30269.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[562]}, {checkpoints,[{562,0}]}, {name,<<"replication_building_562_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[562]}, {takeover,false}, {suffix,"building_562_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",562,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:51.975,ns_1@10.242.238.90:<0.30269.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30270.0> [rebalance:debug,2014-08-19T16:51:51.976,ns_1@10.242.238.90:<0.30269.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:51.976,ns_1@10.242.238.90:<0.30269.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29504.1>,#Ref<16550.0.2.80504>}]} [rebalance:info,2014-08-19T16:51:51.976,ns_1@10.242.238.90:<0.30269.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 562 [rebalance:debug,2014-08-19T16:51:51.976,ns_1@10.242.238.90:<0.30269.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29504.1>,#Ref<16550.0.2.80504>}] [ns_server:debug,2014-08-19T16:51:51.977,ns_1@10.242.238.90:<0.30269.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:51.977,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30271.0> (ok) [rebalance:debug,2014-08-19T16:51:51.978,ns_1@10.242.238.90:<0.30272.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 562 [ns_server:info,2014-08-19T16:51:52.047,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 561 state to replica [ns_server:info,2014-08-19T16:51:52.051,ns_1@10.242.238.90:<0.30289.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 561 to state replica 
[ns_server:debug,2014-08-19T16:51:52.070,ns_1@10.242.238.90:<0.30289.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_561_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:52.072,ns_1@10.242.238.90:<0.30289.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[561]}, {checkpoints,[{561,0}]}, {name,<<"replication_building_561_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[561]}, {takeover,false}, {suffix,"building_561_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",561,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:52.072,ns_1@10.242.238.90:<0.30289.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30290.0> [rebalance:debug,2014-08-19T16:51:52.073,ns_1@10.242.238.90:<0.30289.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:52.073,ns_1@10.242.238.90:<0.30289.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29555.1>,#Ref<16550.0.2.80803>}]} [rebalance:info,2014-08-19T16:51:52.073,ns_1@10.242.238.90:<0.30289.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 561 [rebalance:debug,2014-08-19T16:51:52.074,ns_1@10.242.238.90:<0.30289.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29555.1>,#Ref<16550.0.2.80803>}] [ns_server:debug,2014-08-19T16:51:52.074,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30291.0> (ok) [ns_server:debug,2014-08-19T16:51:52.074,ns_1@10.242.238.90:<0.30289.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:52.076,ns_1@10.242.238.90:<0.30292.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 561 [ns_server:debug,2014-08-19T16:51:52.092,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 567. Nacking mccouch update. [views:debug,2014-08-19T16:51:52.092,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/567. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:52.092,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",567,pending,0} [ns_server:debug,2014-08-19T16:51:52.093,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,735,671,607,424,360,969,722, 658,594,411,347,956,709,645,581,398,1020,943,760,696,632,385,1007,994,747, 683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955,708,644,580, 397,1019,942,759,695,631,567,384,1006,993,746,682,618,371,980,733,669,605, 422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630,383, 1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408,344,953, 706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369,978,731, 667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692, 628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589,406, 342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742,678,614,367, 976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754, 690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,587, 404,949,766,702,638,574,391,1013,753,689,625,378,1000,987,740,676,612,365, 974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390,1012,999, 752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649,585, 402,947,764,700,636,572,389,1011,998,751,687,623,376,985,738,674,610,363,972, 725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,571,388,1010, 997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634,996,685,374, 736,608,425,970,659,348,710,582,399,1021,944,761,633,995,684,373] [ns_server:info,2014-08-19T16:51:52.143,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 560 state to replica [ns_server:info,2014-08-19T16:51:52.147,ns_1@10.242.238.90:<0.30295.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 560 to state replica [ns_server:debug,2014-08-19T16:51:52.166,ns_1@10.242.238.90:<0.30295.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_560_'ns_1@10.242.238.90' [views:debug,2014-08-19T16:51:52.167,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/567. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:52.167,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",567,pending,0} [rebalance:info,2014-08-19T16:51:52.167,ns_1@10.242.238.90:<0.30295.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[560]}, {checkpoints,[{560,0}]}, {name,<<"replication_building_560_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[560]}, {takeover,false}, {suffix,"building_560_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",560,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:52.168,ns_1@10.242.238.90:<0.30295.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30296.0> [rebalance:debug,2014-08-19T16:51:52.168,ns_1@10.242.238.90:<0.30295.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:52.169,ns_1@10.242.238.90:<0.30295.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29606.1>,#Ref<16550.0.2.81102>}]} [rebalance:info,2014-08-19T16:51:52.169,ns_1@10.242.238.90:<0.30295.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 560 [rebalance:debug,2014-08-19T16:51:52.169,ns_1@10.242.238.90:<0.30295.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29606.1>,#Ref<16550.0.2.81102>}] [ns_server:debug,2014-08-19T16:51:52.169,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30297.0> (ok) [ns_server:debug,2014-08-19T16:51:52.170,ns_1@10.242.238.90:<0.30295.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:52.171,ns_1@10.242.238.90:<0.30298.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 560 [ns_server:info,2014-08-19T16:51:52.237,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 559 state to replica [ns_server:info,2014-08-19T16:51:52.241,ns_1@10.242.238.90:<0.30301.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 559 to state replica [ns_server:debug,2014-08-19T16:51:52.260,ns_1@10.242.238.90:<0.30301.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_559_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:52.262,ns_1@10.242.238.90:<0.30301.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[559]}, {checkpoints,[{559,0}]}, {name,<<"replication_building_559_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[559]}, {takeover,false}, {suffix,"building_559_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",559,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:52.262,ns_1@10.242.238.90:<0.30301.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30316.0> [rebalance:debug,2014-08-19T16:51:52.262,ns_1@10.242.238.90:<0.30301.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:52.263,ns_1@10.242.238.90:<0.30301.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter 
{had_backfill,undefined,undefined,[{<16550.29657.1>,#Ref<16550.0.2.81363>}]} [rebalance:info,2014-08-19T16:51:52.263,ns_1@10.242.238.90:<0.30301.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 559 [rebalance:debug,2014-08-19T16:51:52.263,ns_1@10.242.238.90:<0.30301.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29657.1>,#Ref<16550.0.2.81363>}] [ns_server:debug,2014-08-19T16:51:52.264,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30317.0> (ok) [ns_server:debug,2014-08-19T16:51:52.264,ns_1@10.242.238.90:<0.30301.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:52.265,ns_1@10.242.238.90:<0.30318.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 559 [ns_server:info,2014-08-19T16:51:52.331,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 558 state to replica [ns_server:debug,2014-08-19T16:51:52.334,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 568. Nacking mccouch update. [views:debug,2014-08-19T16:51:52.334,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/568. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:52.334,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",568,pending,0} [ns_server:debug,2014-08-19T16:51:52.335,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,735,671,607,424,360,969,722, 658,594,411,347,956,709,645,581,398,1020,943,760,696,632,568,385,1007,994, 747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955,708,644, 580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,371,980,733,669, 605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630, 383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408,344, 953,706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369,978, 731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756, 692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589, 406,342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742,678,614, 367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014, 754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,574,391,1013,753,689,625,378,1000,987,740,676,612, 365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390,1012, 999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649, 585,402,947,764,700,636,572,389,1011,998,751,687,623,376,985,738,674,610,363, 972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,571,388, 1010,997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634,996, 685,374,736,608,425,970,659,348,710,582,399,1021,944,761,633,995,684,373] [ns_server:info,2014-08-19T16:51:52.336,ns_1@10.242.238.90:<0.30321.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 558 to state replica [ns_server:debug,2014-08-19T16:51:52.354,ns_1@10.242.238.90:<0.30321.0>:ebucketmigrator_srv:kill_tapname:1090]killing 
tap named: replication_building_558_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:52.356,ns_1@10.242.238.90:<0.30321.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[558]}, {checkpoints,[{558,0}]}, {name,<<"replication_building_558_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[558]}, {takeover,false}, {suffix,"building_558_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",558,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:52.356,ns_1@10.242.238.90:<0.30321.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30322.0> [rebalance:debug,2014-08-19T16:51:52.356,ns_1@10.242.238.90:<0.30321.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:52.357,ns_1@10.242.238.90:<0.30321.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29708.1>,#Ref<16550.0.2.81625>}]} [rebalance:info,2014-08-19T16:51:52.357,ns_1@10.242.238.90:<0.30321.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 558 [rebalance:debug,2014-08-19T16:51:52.357,ns_1@10.242.238.90:<0.30321.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29708.1>,#Ref<16550.0.2.81625>}] [ns_server:debug,2014-08-19T16:51:52.358,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30323.0> (ok) [ns_server:debug,2014-08-19T16:51:52.358,ns_1@10.242.238.90:<0.30321.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:52.360,ns_1@10.242.238.90:<0.30324.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 558 [views:debug,2014-08-19T16:51:52.368,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/568. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:52.368,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",568,pending,0} [ns_server:info,2014-08-19T16:51:52.427,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 557 state to replica [ns_server:info,2014-08-19T16:51:52.431,ns_1@10.242.238.90:<0.30341.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 557 to state replica [ns_server:debug,2014-08-19T16:51:52.454,ns_1@10.242.238.90:<0.30341.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_557_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:52.456,ns_1@10.242.238.90:<0.30341.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[557]}, {checkpoints,[{557,0}]}, {name,<<"replication_building_557_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[557]}, {takeover,false}, {suffix,"building_557_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",557,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:52.456,ns_1@10.242.238.90:<0.30341.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30342.0> [rebalance:debug,2014-08-19T16:51:52.457,ns_1@10.242.238.90:<0.30341.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:52.457,ns_1@10.242.238.90:<0.30341.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29759.1>,#Ref<16550.0.2.81926>}]} [rebalance:info,2014-08-19T16:51:52.457,ns_1@10.242.238.90:<0.30341.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 557 [rebalance:debug,2014-08-19T16:51:52.458,ns_1@10.242.238.90:<0.30341.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29759.1>,#Ref<16550.0.2.81926>}] [ns_server:debug,2014-08-19T16:51:52.458,ns_1@10.242.238.90:<0.30341.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:52.458,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30343.0> (ok) [ns_server:debug,2014-08-19T16:51:52.460,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 566. Nacking mccouch update. [views:debug,2014-08-19T16:51:52.460,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/566. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:52.460,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",566,pending,0} [rebalance:debug,2014-08-19T16:51:52.460,ns_1@10.242.238.90:<0.30344.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 557 [ns_server:debug,2014-08-19T16:51:52.461,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,735,671,607,424,360,969,722, 658,594,411,347,956,709,645,581,398,1020,943,760,696,632,568,385,1007,994, 747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955,708,644, 580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,371,980,733,669, 605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630, 566,383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408, 344,953,706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369, 978,731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939, 756,692,628,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653, 589,406,342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742,678, 614,367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392, 1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353,962, 715,651,587,404,949,766,702,638,574,391,1013,753,689,625,378,1000,987,740, 676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573, 390,1012,999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351,960, 713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,376,985,738,674, 610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,571, 388,1010,997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634, 996,685,374,736,608,425,970,659,348,710,582,399,1021,944,761,633,995,684,373] [views:debug,2014-08-19T16:51:52.494,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/566. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:52.494,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",566,pending,0} [ns_server:info,2014-08-19T16:51:52.527,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 556 state to replica [ns_server:info,2014-08-19T16:51:52.532,ns_1@10.242.238.90:<0.30347.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 556 to state replica [ns_server:debug,2014-08-19T16:51:52.553,ns_1@10.242.238.90:<0.30347.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_556_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:52.554,ns_1@10.242.238.90:<0.30347.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[556]}, {checkpoints,[{556,0}]}, {name,<<"replication_building_556_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[556]}, {takeover,false}, {suffix,"building_556_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",556,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:52.555,ns_1@10.242.238.90:<0.30347.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30362.0> [rebalance:debug,2014-08-19T16:51:52.555,ns_1@10.242.238.90:<0.30347.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:52.555,ns_1@10.242.238.90:<0.30347.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29810.1>,#Ref<16550.0.2.82189>}]} [rebalance:info,2014-08-19T16:51:52.556,ns_1@10.242.238.90:<0.30347.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 556 [rebalance:debug,2014-08-19T16:51:52.556,ns_1@10.242.238.90:<0.30347.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29810.1>,#Ref<16550.0.2.82189>}] [ns_server:debug,2014-08-19T16:51:52.557,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30363.0> (ok) [ns_server:debug,2014-08-19T16:51:52.557,ns_1@10.242.238.90:<0.30347.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:52.558,ns_1@10.242.238.90:<0.30364.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 556 [ns_server:debug,2014-08-19T16:51:52.602,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 564. Nacking mccouch update. [views:debug,2014-08-19T16:51:52.603,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/564. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:52.603,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",564,pending,0} [ns_server:debug,2014-08-19T16:51:52.604,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,735,671,607,424,360,969,722, 658,594,411,347,956,709,645,581,398,1020,943,760,696,632,568,385,1007,994, 747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955,708,644, 580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,371,980,733,669, 605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630, 566,383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408, 344,953,706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369, 978,731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939, 756,692,628,564,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717, 653,589,406,342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742, 678,614,367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575, 392,1014,754,690,626,379,1001,988,741,677,613,366,975,728,664,600,417,353, 962,715,651,587,404,949,766,702,638,574,391,1013,753,689,625,378,1000,987, 740,676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637, 573,390,1012,999,752,688,624,377,986,739,675,611,364,973,726,662,598,415,351, 960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,376,985,738, 674,610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635, 571,388,1010,997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762, 634,996,685,374,736,608,425,970,659,348,710,582,399,1021,944,761,633,995,684, 373] [ns_server:info,2014-08-19T16:51:52.625,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 555 state to replica [ns_server:info,2014-08-19T16:51:52.629,ns_1@10.242.238.90:<0.30367.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 555 to state replica [ns_server:debug,2014-08-19T16:51:52.649,ns_1@10.242.238.90:<0.30367.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_555_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:52.650,ns_1@10.242.238.90:<0.30367.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[555]}, {checkpoints,[{555,0}]}, {name,<<"replication_building_555_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[555]}, {takeover,false}, {suffix,"building_555_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",555,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:52.651,ns_1@10.242.238.90:<0.30367.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30368.0> [rebalance:debug,2014-08-19T16:51:52.651,ns_1@10.242.238.90:<0.30367.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:52.652,ns_1@10.242.238.90:<0.30367.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.29861.1>,#Ref<16550.0.2.83507>}]} [rebalance:info,2014-08-19T16:51:52.652,ns_1@10.242.238.90:<0.30367.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for 
vbucket 555 [rebalance:debug,2014-08-19T16:51:52.652,ns_1@10.242.238.90:<0.30367.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.29861.1>,#Ref<16550.0.2.83507>}] [views:debug,2014-08-19T16:51:52.653,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/564. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:52.653,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",564,pending,0} [ns_server:debug,2014-08-19T16:51:52.659,ns_1@10.242.238.90:<0.30367.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:52.660,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30369.0> (ok) [rebalance:debug,2014-08-19T16:51:52.662,ns_1@10.242.238.90:<0.30370.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 555 [ns_server:debug,2014-08-19T16:51:52.736,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 562. Nacking mccouch update. [views:debug,2014-08-19T16:51:52.737,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/562. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:52.737,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",562,pending,0} [ns_server:debug,2014-08-19T16:51:52.738,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,735,671,607,424,360,969,722, 658,594,411,347,956,709,645,581,398,1020,943,760,696,632,568,385,1007,994, 747,683,619,372,981,734,670,606,423,359,968,721,657,593,410,346,955,708,644, 580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,371,980,733,669, 605,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630, 566,383,1005,992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408, 344,953,706,642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369, 978,731,667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939, 756,692,628,564,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717, 653,589,406,342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742, 678,614,367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575, 392,1014,754,690,626,562,379,1001,988,741,677,613,366,975,728,664,600,417, 353,962,715,651,587,404,949,766,702,638,574,391,1013,753,689,625,378,1000, 987,740,676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701, 637,573,390,1012,999,752,688,624,377,986,739,675,611,364,973,726,662,598,415, 351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,376,985, 738,674,610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699, 635,571,388,1010,997,686,375,737,609,426,971,660,349,711,583,400,1022,945, 762,634,996,685,374,736,608,425,970,659,348,710,582,399,1021,944,761,633,995, 684,373] [views:debug,2014-08-19T16:51:52.787,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/562. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:52.787,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",562,pending,0} [ns_server:debug,2014-08-19T16:51:52.917,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 560. Nacking mccouch update. [views:debug,2014-08-19T16:51:52.917,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/560. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:52.917,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",560,pending,0} [ns_server:debug,2014-08-19T16:51:52.918,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,969,722,658,594,411, 347,956,709,645,581,398,1020,943,760,696,632,568,385,1007,994,747,683,619, 372,981,734,670,606,423,359,968,721,657,593,410,346,955,708,644,580,397,1019, 942,759,695,631,567,384,1006,993,746,682,618,371,980,733,669,605,422,358,967, 720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005, 992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408,344,953,706, 642,578,395,1017,940,757,693,629,382,1004,991,744,680,616,369,978,731,667, 603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628, 564,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589,406, 342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742,678,614,367, 976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754, 690,626,562,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,574,391,1013,753,689,625,378,1000,987,740,676,612, 365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390,1012, 999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351,960,713, 649,585,402,947,764,700,636,572,389,1011,998,751,687,623,376,985,738,674,610, 363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,571,388, 1010,997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634,996, 685,374,736,608,425,970,659,348,710,582,399,1021,944,761,633,995,684,373,735, 607,424] [views:debug,2014-08-19T16:51:52.968,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/560. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:52.968,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",560,pending,0} [rebalance:debug,2014-08-19T16:51:52.969,ns_1@10.242.238.90:<0.30086.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:52.969,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30086.0> (ok) [rebalance:debug,2014-08-19T16:51:52.972,ns_1@10.242.238.90:<0.30402.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 575 [ns_server:debug,2014-08-19T16:51:53.168,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 565. Nacking mccouch update. 
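
[editor's note] The capi_set_view_manager entries above dump the complete "Usable vbuckets" set on every set_vbucket event, so the actual change in each dump is hard to spot by eye. The sketch below is a reading aid only, not part of the log: it diffs consecutive dumps to show which vbuckets each update added or removed. It assumes this capture has been saved as "ns_server.debug.log" (a hypothetical filename) and relies only on the message shape seen in these entries.

# Minimal sketch: diff consecutive "Usable vbuckets:" dumps from this capture.
# Assumes the raw text is saved as 'ns_server.debug.log' (hypothetical filename).
import re

with open("ns_server.debug.log", encoding="utf-8", errors="replace") as fh:
    text = fh.read()

# Each dump is a bracketed, comma-separated list of vbucket ids (digits, commas
# and whitespace only), so one character class captures the whole list body.
dumps = [set(int(n) for n in blob.split(","))
         for blob in re.findall(r"Usable vbuckets:\s*\[([\d,\s]+)\]", text)]

for prev, cur in zip(dumps, dumps[1:]):
    added, removed = sorted(cur - prev), sorted(prev - cur)
    print(f"added {added}, removed {removed}, total {len(cur)}")

Run over this section, each dump appears to add exactly one vbucket (567, 568, 566, 564, 562, ...), matching the pending set_vbucket events being signalled to mccouch; the long lists only differ in ordering otherwise.
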
[views:debug,2014-08-19T16:51:53.168,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/565. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:53.168,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",565,pending,0} [ns_server:debug,2014-08-19T16:51:53.169,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,969,722,658,594,411, 347,956,709,645,581,398,1020,943,760,696,632,568,385,1007,994,747,683,619, 372,981,734,670,606,423,359,968,721,657,593,410,346,955,708,644,580,397,1019, 942,759,695,631,567,384,1006,993,746,682,618,371,980,733,669,605,422,358,967, 720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005, 992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408,344,953,706, 642,578,395,1017,940,757,693,629,565,382,1004,991,744,680,616,369,978,731, 667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692, 628,564,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589, 406,342,951,704,640,576,393,1015,938,755,691,627,380,1002,989,742,678,614, 367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014, 754,690,626,562,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715, 651,587,404,949,766,702,638,574,391,1013,753,689,625,378,1000,987,740,676, 612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351,960, 713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,376,985,738,674, 610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,571, 388,1010,997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634, 996,685,374,736,608,425,970,659,348,710,582,399,1021,944,761,633,995,684,373, 735,607,424] [views:debug,2014-08-19T16:51:53.235,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/565. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:53.236,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",565,pending,0} [ns_server:debug,2014-08-19T16:51:53.369,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 563. Nacking mccouch update. [views:debug,2014-08-19T16:51:53.369,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/563. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:53.369,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",563,pending,0} [ns_server:debug,2014-08-19T16:51:53.370,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,969,722,658,594,411, 347,956,709,645,581,398,1020,943,760,696,632,568,385,1007,994,747,683,619, 372,981,734,670,606,423,359,968,721,657,593,410,346,955,708,644,580,397,1019, 942,759,695,631,567,384,1006,993,746,682,618,371,980,733,669,605,422,358,967, 720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005, 992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408,344,953,706, 642,578,395,1017,940,757,693,629,565,382,1004,991,744,680,616,369,978,731, 667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692, 628,564,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589, 406,342,951,704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678, 614,367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392, 1014,754,690,626,562,379,1001,988,741,677,613,366,975,728,664,600,417,353, 962,715,651,587,404,949,766,702,638,574,391,1013,753,689,625,378,1000,987, 740,676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637, 573,390,1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415, 351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,376,985, 738,674,610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699, 635,571,388,1010,997,686,375,737,609,426,971,660,349,711,583,400,1022,945, 762,634,996,685,374,736,608,425,970,659,348,710,582,399,1021,944,761,633,995, 684,373,735,607,424] [views:debug,2014-08-19T16:51:53.437,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/563. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:53.437,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",563,pending,0} [ns_server:debug,2014-08-19T16:51:53.536,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 561. Nacking mccouch update. [views:debug,2014-08-19T16:51:53.537,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/561. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:53.537,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",561,pending,0} [ns_server:debug,2014-08-19T16:51:53.538,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,969,722,658,594,411, 347,956,709,645,581,398,1020,943,760,696,632,568,385,1007,994,747,683,619, 372,981,734,670,606,423,359,968,721,657,593,410,346,955,708,644,580,397,1019, 942,759,695,631,567,384,1006,993,746,682,618,371,980,733,669,605,422,358,967, 720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005, 992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408,344,953,706, 642,578,395,1017,940,757,693,629,565,382,1004,991,744,680,616,369,978,731, 667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692, 628,564,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589, 406,342,951,704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678, 614,367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392, 1014,754,690,626,562,379,1001,988,741,677,613,366,975,728,664,600,417,353, 962,715,651,587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000, 987,740,676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701, 637,573,390,1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598, 415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,376, 985,738,674,610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763, 699,635,571,388,1010,997,686,375,737,609,426,971,660,349,711,583,400,1022, 945,762,634,996,685,374,736,608,425,970,659,348,710,582,399,1021,944,761,633, 995,684,373,735,607,424] [views:debug,2014-08-19T16:51:53.588,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/561. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:53.588,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",561,pending,0} [ns_server:debug,2014-08-19T16:51:53.704,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 559. Nacking mccouch update. [views:debug,2014-08-19T16:51:53.704,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/559. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:53.704,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",559,pending,0} [ns_server:debug,2014-08-19T16:51:53.705,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,969,722,658,594,411, 347,956,709,645,581,398,1020,943,760,696,632,568,385,1007,994,747,683,619, 372,981,734,670,606,423,359,968,721,657,593,410,346,955,708,644,580,397,1019, 942,759,695,631,567,384,1006,993,746,682,618,371,980,733,669,605,422,358,967, 720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005, 992,745,681,617,370,979,732,668,604,421,357,966,719,655,591,408,344,953,706, 642,578,395,1017,940,757,693,629,565,382,1004,991,744,680,616,369,978,731, 667,603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692, 628,564,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589, 406,342,951,704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678, 614,367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392, 1014,754,690,626,562,379,1001,988,741,677,613,366,975,728,664,600,417,353, 962,715,651,587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000, 987,740,676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701, 637,573,390,1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598, 415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946, 763,699,635,571,388,1010,997,686,375,737,609,426,971,660,349,711,583,400, 1022,945,762,634,996,685,374,736,608,425,970,659,348,710,582,399,1021,944, 761,633,995,684,373,735,607,424] [views:debug,2014-08-19T16:51:53.755,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/559. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:53.756,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",559,pending,0} [ns_server:debug,2014-08-19T16:51:53.898,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 557. Nacking mccouch update. [views:debug,2014-08-19T16:51:53.898,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/557. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:53.898,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",557,pending,0} [ns_server:debug,2014-08-19T16:51:53.899,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956,709, 645,581,398,1020,943,760,696,632,568,385,1007,994,747,683,619,372,981,734, 670,606,423,359,968,721,657,593,410,346,955,708,644,580,397,1019,942,759,695, 631,567,384,1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656,592, 409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005,992,745,681, 617,370,979,732,668,604,421,357,966,719,655,591,408,344,953,706,642,578,395, 1017,940,757,693,629,565,382,1004,991,744,680,616,369,978,731,667,603,420, 356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381, 1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589,406,342,951, 704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,367,976, 729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690, 626,562,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,587, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390,1012, 999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351,960,713, 649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738,674, 610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,571, 388,1010,997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762,634, 996,685,557,374,736,608,425,970,659,348,710,582,399,1021,944,761,633,995,684, 373,735,607,424,969,658,347] [views:debug,2014-08-19T16:51:53.965,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/557. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:53.965,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",557,pending,0} [ns_server:debug,2014-08-19T16:51:54.115,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 555. Nacking mccouch update. [views:debug,2014-08-19T16:51:54.115,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/555. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:54.115,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",555,pending,0} [ns_server:debug,2014-08-19T16:51:54.116,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956,709, 645,581,398,1020,943,760,696,632,568,385,1007,994,747,683,619,555,372,981, 734,670,606,423,359,968,721,657,593,410,346,955,708,644,580,397,1019,942,759, 695,631,567,384,1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656, 592,409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005,992,745, 681,617,370,979,732,668,604,421,357,966,719,655,591,408,344,953,706,642,578, 395,1017,940,757,693,629,565,382,1004,991,744,680,616,369,978,731,667,603, 420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564, 381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589,406,342, 951,704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,367, 976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754, 690,626,562,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676, 612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351,960, 713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738, 674,610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635, 571,388,1010,997,686,375,737,609,426,971,660,349,711,583,400,1022,945,762, 634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021,944,761,633,995, 684,373,735,607,424,969,658,347] [views:debug,2014-08-19T16:51:54.149,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/555. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:54.149,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",555,pending,0} [ns_server:debug,2014-08-19T16:51:54.241,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 558. Nacking mccouch update. [views:debug,2014-08-19T16:51:54.241,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/558. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:54.241,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",558,pending,0} [ns_server:debug,2014-08-19T16:51:54.242,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956,709, 645,581,398,1020,943,760,696,632,568,385,1007,994,747,683,619,555,372,981, 734,670,606,423,359,968,721,657,593,410,346,955,708,644,580,397,1019,942,759, 695,631,567,384,1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656, 592,409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005,992,745, 681,617,370,979,732,668,604,421,357,966,719,655,591,408,344,953,706,642,578, 395,1017,940,757,693,629,565,382,1004,991,744,680,616,369,978,731,667,603, 420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564, 381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589,406,342, 951,704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,367, 976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754, 690,626,562,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676, 612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351,960, 713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738, 674,610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635, 571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400,1022,945, 762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021,944,761,633, 995,684,373,735,607,424,969,658,347] [views:debug,2014-08-19T16:51:54.275,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/558. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:54.276,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",558,pending,0} [ns_server:debug,2014-08-19T16:51:54.367,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 556. Nacking mccouch update. [views:debug,2014-08-19T16:51:54.367,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/556. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:54.367,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",556,pending,0} [ns_server:debug,2014-08-19T16:51:54.368,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956,709, 645,581,398,1020,943,760,696,632,568,385,1007,994,747,683,619,555,372,981, 734,670,606,423,359,968,721,657,593,410,346,955,708,644,580,397,1019,942,759, 695,631,567,384,1006,993,746,682,618,371,980,733,669,605,422,358,967,720,656, 592,409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005,992,745, 681,617,370,979,732,668,604,421,357,966,719,655,591,408,344,953,706,642,578, 395,1017,940,757,693,629,565,382,1004,991,744,680,616,369,978,731,667,603, 420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564, 381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589,406,342, 951,704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,367, 976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754, 690,626,562,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676, 612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351,960, 713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738, 674,610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635, 571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400,1022,945, 762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021,944,761,633, 995,684,556,373,735,607,424,969,658,347] [views:debug,2014-08-19T16:51:54.429,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/556. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:54.429,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",556,pending,0} [rebalance:debug,2014-08-19T16:51:54.432,ns_1@10.242.238.90:<0.30364.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:54.433,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30364.0> (ok) [rebalance:debug,2014-08-19T16:51:54.436,ns_1@10.242.238.90:<0.30534.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 556 [rebalance:debug,2014-08-19T16:51:54.498,ns_1@10.242.238.90:<0.30324.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:54.499,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30324.0> (ok) [rebalance:debug,2014-08-19T16:51:54.501,ns_1@10.242.238.90:<0.30537.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 558 [rebalance:debug,2014-08-19T16:51:54.546,ns_1@10.242.238.90:<0.30298.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:54.547,ns_1@10.242.238.90:<0.30370.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:54.547,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30298.0> (ok) [ns_server:debug,2014-08-19T16:51:54.547,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30370.0> (ok) [rebalance:debug,2014-08-19T16:51:54.550,ns_1@10.242.238.90:<0.30540.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 560 [rebalance:debug,2014-08-19T16:51:54.551,ns_1@10.242.238.90:<0.30543.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 555 [rebalance:debug,2014-08-19T16:51:54.655,ns_1@10.242.238.90:<0.30344.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:54.655,ns_1@10.242.238.90:<0.30272.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:54.656,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30344.0> (ok) [ns_server:debug,2014-08-19T16:51:54.656,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30272.0> (ok) [rebalance:debug,2014-08-19T16:51:54.660,ns_1@10.242.238.90:<0.30546.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 557 [rebalance:debug,2014-08-19T16:51:54.662,ns_1@10.242.238.90:<0.30549.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 562 [rebalance:debug,2014-08-19T16:51:54.739,ns_1@10.242.238.90:<0.30318.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:54.739,ns_1@10.242.238.90:<0.30246.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:54.739,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30318.0> (ok) [ns_server:debug,2014-08-19T16:51:54.739,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30246.0> (ok) [rebalance:debug,2014-08-19T16:51:54.746,ns_1@10.242.238.90:<0.30552.0>:janitor_agent:handle_call:793]Going to wait for 
persistence of checkpoint 1 in vbucket 559 [rebalance:debug,2014-08-19T16:51:54.747,ns_1@10.242.238.90:<0.30553.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 564 [rebalance:debug,2014-08-19T16:51:54.823,ns_1@10.242.238.90:<0.30220.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:54.823,ns_1@10.242.238.90:<0.30292.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:54.823,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30220.0> (ok) [ns_server:debug,2014-08-19T16:51:54.823,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30292.0> (ok) [rebalance:debug,2014-08-19T16:51:54.827,ns_1@10.242.238.90:<0.30558.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 561 [rebalance:debug,2014-08-19T16:51:54.827,ns_1@10.242.238.90:<0.30559.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 566 [rebalance:debug,2014-08-19T16:51:54.907,ns_1@10.242.238.90:<0.30266.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:54.907,ns_1@10.242.238.90:<0.30193.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:54.907,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30266.0> (ok) [ns_server:debug,2014-08-19T16:51:54.907,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30193.0> (ok) [rebalance:debug,2014-08-19T16:51:54.911,ns_1@10.242.238.90:<0.30564.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 563 [rebalance:debug,2014-08-19T16:51:54.911,ns_1@10.242.238.90:<0.30565.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 568 [rebalance:debug,2014-08-19T16:51:55.007,ns_1@10.242.238.90:<0.30161.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:55.007,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30161.0> (ok) [rebalance:debug,2014-08-19T16:51:55.007,ns_1@10.242.238.90:<0.30240.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:55.007,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30240.0> (ok) [rebalance:debug,2014-08-19T16:51:55.011,ns_1@10.242.238.90:<0.30570.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 570 [rebalance:debug,2014-08-19T16:51:55.012,ns_1@10.242.238.90:<0.30571.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 565 [rebalance:debug,2014-08-19T16:51:55.101,ns_1@10.242.238.90:<0.30135.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.101,ns_1@10.242.238.90:<0.30214.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:55.101,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30135.0> (ok) [ns_server:debug,2014-08-19T16:51:55.101,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30214.0> (ok) [rebalance:debug,2014-08-19T16:51:55.105,ns_1@10.242.238.90:<0.30576.0>:janitor_agent:handle_call:793]Going to wait for 
persistence of checkpoint 1 in vbucket 572 [rebalance:debug,2014-08-19T16:51:55.105,ns_1@10.242.238.90:<0.30579.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 567 [rebalance:debug,2014-08-19T16:51:55.226,ns_1@10.242.238.90:<0.30167.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.226,ns_1@10.242.238.90:<0.30130.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:55.226,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30167.0> (ok) [ns_server:debug,2014-08-19T16:51:55.226,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30130.0> (ok) [ns_server:debug,2014-08-19T16:51:55.227,ns_1@10.242.238.90:<0.30583.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 574) [ns_server:debug,2014-08-19T16:51:55.227,ns_1@10.242.238.90:<0.30583.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.227,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30582.0> (ok) [rebalance:debug,2014-08-19T16:51:55.228,ns_1@10.242.238.90:<0.30103.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.228,ns_1@10.242.238.90:<0.30103.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.228,ns_1@10.242.238.90:<0.30584.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.229,ns_1@10.242.238.90:<0.30584.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:55.229,ns_1@10.242.238.90:<0.30103.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
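
[editor's note] From roughly 16:51:52 onward the section repeats one replica-building sequence per vbucket: ns_memcached changes the vbucket state to replica, ebucketmigrator_srv kills any stale replication_building tap name and opens a TAP stream from 10.242.238.88 to 10.242.238.90 with set_to_pending_state, janitor_agent waits for persistence of checkpoint 1, and capi_set_view_manager records the resulting pending set_vbucket event. The sketch below reconstructs that per-vbucket timeline from the capture; it is a reading aid only, 'ns_server.debug.log' is a hypothetical filename, and the patterns cover just the message shapes seen in this section.

# Minimal sketch: rebuild a per-vbucket timeline of the replica-building steps
# seen above. Assumes the capture is saved as 'ns_server.debug.log'.
import re
from collections import defaultdict

# Entries run together in this capture, so split on the '[component:level,date'
# headers rather than on newlines.
ENTRY = re.compile(r"(?=\[(?:ns_server|views|rebalance):[a-z]+,\d{4}-)")
STAMP = re.compile(r",(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}),")
STEPS = [
    ("replica_state", re.compile(r"Changed vbucket (\d+) state to replica")),
    ("tap_stream",    re.compile(r"Initial stream for\s+vbucket (\d+)")),
    ("persist_wait",  re.compile(r"persistence of checkpoint \d+ in vbucket (\d+)")),
    ("view_pending",  re.compile(r"set_vbucket event for default/(\d+)\.\s*Updated state: pending")),
]

def timelines(path="ns_server.debug.log"):
    with open(path, encoding="utf-8", errors="replace") as fh:
        text = fh.read()
    events = defaultdict(list)          # vbucket id -> [(timestamp, step), ...]
    for entry in ENTRY.split(text):
        stamp = STAMP.search(entry)
        for step, pattern in STEPS:
            for match in pattern.finditer(entry):
                events[int(match.group(1))].append(
                    (stamp.group(1) if stamp else "?", step))
    return {vb: sorted(evs) for vb, evs in events.items()}

if __name__ == "__main__":
    for vb, evs in sorted(timelines().items()):
        print(vb, " -> ".join(step for _, step in evs))

For the vbuckets being built here the output is the same four-step sequence, with the pending view event typically appearing twice per vbucket because the first mccouch update is nacked after the _local/vbuuid document is added.
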
[rebalance:debug,2014-08-19T16:51:55.230,ns_1@10.242.238.90:<0.30585.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 569 [ns_server:info,2014-08-19T16:51:55.265,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 574 state to active [rebalance:debug,2014-08-19T16:51:55.276,ns_1@10.242.238.90:<0.30141.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:55.276,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30141.0> (ok) [ns_server:debug,2014-08-19T16:51:55.301,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:51:55.304,ns_1@10.242.238.90:<0.30588.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 571 [ns_server:debug,2014-08-19T16:51:55.305,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.305,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3407 us [ns_server:debug,2014-08-19T16:51:55.305,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.306,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{574, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:55.338,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:55.341,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.342,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2401 us [ns_server:debug,2014-08-19T16:51:55.342,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.342,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{318, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:55.351,ns_1@10.242.238.90:<0.30112.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:51:55.351,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30112.0> (ok) [rebalance:debug,2014-08-19T16:51:55.354,ns_1@10.242.238.90:<0.30593.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 573 [views:debug,2014-08-19T16:51:55.401,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/574. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:55.402,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",574,active,1} [rebalance:debug,2014-08-19T16:51:55.469,ns_1@10.242.238.90:<0.30402.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.469,ns_1@10.242.238.90:<0.30540.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.469,ns_1@10.242.238.90:<0.30534.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.469,ns_1@10.242.238.90:<0.30549.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.469,ns_1@10.242.238.90:<0.30537.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.469,ns_1@10.242.238.90:<0.30553.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.469,ns_1@10.242.238.90:<0.30546.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.469,ns_1@10.242.238.90:<0.30543.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:<0.30559.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:<0.30565.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:<0.30552.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:<0.30570.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:<0.30564.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:<0.30576.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:<0.30571.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:<0.30558.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:<0.30585.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30402.0> (ok) [rebalance:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:<0.30579.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:<0.30588.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:<0.30593.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30540.0> (ok) [ns_server:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30534.0> (ok) [ns_server:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30549.0> (ok) 
[ns_server:debug,2014-08-19T16:51:55.470,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30537.0> (ok) [ns_server:debug,2014-08-19T16:51:55.471,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30553.0> (ok) [ns_server:debug,2014-08-19T16:51:55.471,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30546.0> (ok) [ns_server:debug,2014-08-19T16:51:55.471,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30543.0> (ok) [ns_server:debug,2014-08-19T16:51:55.471,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30559.0> (ok) [ns_server:debug,2014-08-19T16:51:55.471,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30565.0> (ok) [ns_server:debug,2014-08-19T16:51:55.471,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30552.0> (ok) [ns_server:debug,2014-08-19T16:51:55.471,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30570.0> (ok) [ns_server:debug,2014-08-19T16:51:55.471,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30564.0> (ok) [ns_server:debug,2014-08-19T16:51:55.471,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30576.0> (ok) [ns_server:debug,2014-08-19T16:51:55.471,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30571.0> (ok) [ns_server:debug,2014-08-19T16:51:55.471,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30558.0> (ok) [ns_server:debug,2014-08-19T16:51:55.471,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30585.0> (ok) [ns_server:debug,2014-08-19T16:51:55.472,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30579.0> (ok) [ns_server:debug,2014-08-19T16:51:55.472,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30588.0> (ok) [ns_server:debug,2014-08-19T16:51:55.472,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30593.0> (ok) [ns_server:debug,2014-08-19T16:51:55.472,ns_1@10.242.238.90:<0.30604.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 575) [ns_server:debug,2014-08-19T16:51:55.472,ns_1@10.242.238.90:<0.30604.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.472,ns_1@10.242.238.90:<0.30618.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 560) [ns_server:debug,2014-08-19T16:51:55.472,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30602.0> (ok) [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30618.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired 
[ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30619.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 558) [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30603.0> (ok) [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30619.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30625.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 556) [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30605.0> (ok) [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30625.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30626.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 562) [rebalance:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30083.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30606.0> (ok) [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30626.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30083.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30627.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 555) [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30628.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30607.0> (ok) [rebalance:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30321.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30295.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.473,ns_1@10.242.238.90:<0.30627.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30628.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30629.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 564) [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30608.0> (ok) [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30295.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30321.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack 
[ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30629.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30631.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30632.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 559) [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30631.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30347.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30632.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30630.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30083.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30633.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 557) [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30609.0> (ok) [rebalance:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30269.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30630.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30347.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30634.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30321.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30367.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30295.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30610.0> (ok) [ns_server:debug,2014-08-19T16:51:55.474,ns_1@10.242.238.90:<0.30633.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30634.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30635.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 568) [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30611.0> (ok) [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30367.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30269.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30635.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30637.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30638.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 566) [rebalance:info,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30347.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30301.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30243.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30612.0> (ok) [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30636.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30637.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30638.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30301.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30639.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 563) [rebalance:info,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30367.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30640.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30636.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30243.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30341.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30613.0> (ok) [rebalance:info,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30269.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30641.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30639.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30642.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 572) [ns_server:debug,2014-08-19T16:51:55.475,ns_1@10.242.238.90:<0.30640.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30641.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30642.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30341.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30644.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 565) [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30614.0> (ok) [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30643.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30301.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30190.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30243.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30644.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30643.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30645.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 570) [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30616.0> (ok) [rebalance:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30217.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30190.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30646.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30645.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30647.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 569) [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30617.0> (ok) [rebalance:info,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30341.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30646.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30217.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30615.0> (ok) [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30647.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30648.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30263.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30649.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 573) [rebalance:info,2014-08-19T16:51:55.476,ns_1@10.242.238.90:<0.30190.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:55.476,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30621.0> (ok) [rebalance:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30129.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30648.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30649.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30650.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 561) [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30623.0> (ok) [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30263.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30129.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30237.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30652.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30650.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30651.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30217.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30652.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30653.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 571) [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30237.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30158.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30164.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30651.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30653.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30654.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30620.0> (ok) [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30655.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 567) [rebalance:info,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30129.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30109.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30158.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30656.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30654.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30263.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30164.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30657.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30655.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30624.0> (ok) [ns_server:debug,2014-08-19T16:51:55.477,ns_1@10.242.238.90:<0.30656.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30237.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30158.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30109.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30657.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30658.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30622.0> (ok) [rebalance:info,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30164.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30658.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30289.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30109.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30138.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30289.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30197.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30659.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30138.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30659.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30660.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30289.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30197.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30660.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:51:55.478,ns_1@10.242.238.90:<0.30661.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:51:55.479,ns_1@10.242.238.90:<0.30661.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:51:55.479,ns_1@10.242.238.90:<0.30138.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:51:55.479,ns_1@10.242.238.90:<0.30197.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:51:55.553,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:55.559,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5758 us [ns_server:debug,2014-08-19T16:51:55.559,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.559,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.559,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{319, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:55.562,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 558 state to active [ns_server:info,2014-08-19T16:51:55.564,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 575 state to active [ns_server:info,2014-08-19T16:51:55.573,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 556 state to active [ns_server:info,2014-08-19T16:51:55.592,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 560 state to active [ns_server:info,2014-08-19T16:51:55.601,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 564 state to active [views:debug,2014-08-19T16:51:55.628,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/558. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:55.628,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",558,active,1} [ns_server:debug,2014-08-19T16:51:55.639,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:55.643,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.643,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3683 us [ns_server:debug,2014-08-19T16:51:55.643,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.644,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{558, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:55.648,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 562 state to active [ns_server:info,2014-08-19T16:51:55.674,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 561 state to active [ns_server:info,2014-08-19T16:51:55.685,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 568 state to active [ns_server:info,2014-08-19T16:51:55.687,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 570 state to active [ns_server:info,2014-08-19T16:51:55.692,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 563 state to active [views:debug,2014-08-19T16:51:55.694,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/575. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:55.694,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",575,active,1} [ns_server:info,2014-08-19T16:51:55.697,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 555 state to active [ns_server:info,2014-08-19T16:51:55.699,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 571 state to active [ns_server:info,2014-08-19T16:51:55.700,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 557 state to active [ns_server:info,2014-08-19T16:51:55.702,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 569 state to active [ns_server:debug,2014-08-19T16:51:55.713,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:55.716,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.716,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2992 us [ns_server:debug,2014-08-19T16:51:55.716,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.717,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{575, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:55.718,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 566 state to active [ns_server:info,2014-08-19T16:51:55.719,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 559 state to active [ns_server:info,2014-08-19T16:51:55.727,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 573 state to active [ns_server:info,2014-08-19T16:51:55.738,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 567 state to active [ns_server:info,2014-08-19T16:51:55.748,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 565 state to active [ns_server:debug,2014-08-19T16:51:55.752,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:55.756,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.756,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4143 us [ns_server:debug,2014-08-19T16:51:55.757,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.757,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: 
buckets -> [{configs,[{"default", [{map,[{556, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:55.758,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 572 state to active [views:debug,2014-08-19T16:51:55.761,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/570. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:55.761,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",570,active,1} [ns_server:debug,2014-08-19T16:51:55.798,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:55.800,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.800,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1635 us [ns_server:debug,2014-08-19T16:51:55.801,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.801,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{560, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:55.828,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/568. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:55.828,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",568,active,1} [ns_server:debug,2014-08-19T16:51:55.834,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:55.837,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.837,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2893 us [ns_server:debug,2014-08-19T16:51:55.838,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.838,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{564, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:55.895,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/564. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:55.895,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",564,active,1} [ns_server:debug,2014-08-19T16:51:55.913,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:55.918,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.918,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5324 us [ns_server:debug,2014-08-19T16:51:55.919,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.919,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{562, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:55.951,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:55.954,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2957 us 
[ns_server:debug,2014-08-19T16:51:55.954,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.955,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.955,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{561, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:55.962,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/562. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:55.962,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",562,active,1} [ns_server:debug,2014-08-19T16:51:55.991,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:55.994,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.994,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3020 us [ns_server:debug,2014-08-19T16:51:55.995,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:55.995,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{568, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:56.029,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/560. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.029,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",560,active,1} [ns_server:debug,2014-08-19T16:51:56.036,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.039,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.040,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{570, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:56.039,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3298 us [ns_server:debug,2014-08-19T16:51:56.040,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.076,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.079,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.079,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3247 us [ns_server:debug,2014-08-19T16:51:56.079,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:56.080,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/556. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.080,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",556,active,1} [ns_server:debug,2014-08-19T16:51:56.080,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{563, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:56.117,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.119,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.120,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2938 us [ns_server:debug,2014-08-19T16:51:56.120,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.121,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{555, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:56.154,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/572. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.155,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",572,active,1} [ns_server:debug,2014-08-19T16:51:56.156,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.163,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.163,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6914 us [ns_server:debug,2014-08-19T16:51:56.164,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.164,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{571, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:56.198,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.199,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.199,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1132 us [ns_server:debug,2014-08-19T16:51:56.200,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.200,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{569, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:56.225,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/566. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.226,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",566,active,1} [ns_server:debug,2014-08-19T16:51:56.234,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.237,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.238,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3195 us [ns_server:debug,2014-08-19T16:51:56.238,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.238,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{557, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:56.259,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/573. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.259,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",573,active,1} [ns_server:debug,2014-08-19T16:51:56.272,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.274,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.274,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1685 us [ns_server:debug,2014-08-19T16:51:56.275,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.275,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{566, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:56.301,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/571. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.301,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",571,active,1} [ns_server:debug,2014-08-19T16:51:56.314,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.318,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.318,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3520 us [ns_server:debug,2014-08-19T16:51:56.318,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.319,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{559, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:56.334,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/569. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.335,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",569,active,1} [ns_server:debug,2014-08-19T16:51:56.350,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.353,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.353,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2732 us [ns_server:debug,2014-08-19T16:51:56.354,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.354,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{573, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:56.376,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/567. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.377,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",567,active,1} [ns_server:debug,2014-08-19T16:51:56.453,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.460,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.461,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7620 us [ns_server:debug,2014-08-19T16:51:56.461,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.461,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{567, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:56.477,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/565. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.477,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",565,active,1} [ns_server:debug,2014-08-19T16:51:56.494,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.497,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.498,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1802 us [ns_server:debug,2014-08-19T16:51:56.498,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{565, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:56.498,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:56.510,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/563. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.510,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",563,active,1} [ns_server:debug,2014-08-19T16:51:56.538,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.542,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3174 us [ns_server:debug,2014-08-19T16:51:56.542,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.543,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.543,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{572, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:56.544,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/561. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.544,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",561,active,1} [ns_server:debug,2014-08-19T16:51:56.575,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:51:56.577,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/559. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.577,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",559,active,1} [ns_server:debug,2014-08-19T16:51:56.578,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2952 us [ns_server:debug,2014-08-19T16:51:56.578,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.578,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.579,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{303, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:56.611,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/557. Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.612,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",557,active,1} [ns_server:debug,2014-08-19T16:51:56.617,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.618,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.619,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1800 us [ns_server:debug,2014-08-19T16:51:56.619,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.620,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{302, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:56.644,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/555. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:51:56.645,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",555,active,1} [ns_server:debug,2014-08-19T16:51:56.651,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.654,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.654,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3030 us [ns_server:debug,2014-08-19T16:51:56.655,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.655,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{300, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:56.689,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.693,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.693,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4170 us [ns_server:debug,2014-08-19T16:51:56.694,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.694,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{312, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:56.725,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.728,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.728,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2948 us [ns_server:debug,2014-08-19T16:51:56.728,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
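The set_vbucket entries above record the mc_couch_events emitted as vbuckets on this node are switched to their new states during the rebalance. A minimal sketch for tallying those transitions offline (Python; the log path, function name and regex are illustrative assumptions, not ns_server tooling, and the pattern assumes the whole "Got set_vbucket event for <bucket>/<vb>. Updated state: <state> (...)" message sits on one line of the raw log, as it does in the entries above):

import re
from collections import Counter

# Tally "Got set_vbucket event for <bucket>/<vb>. Updated state: <state> (...)"
# messages from a saved debug log. Illustrative only: the default path is a
# placeholder, not a real ns_server file name.
SET_VBUCKET_RE = re.compile(
    r"Got set_vbucket event for (?P<bucket>[^/\s]+)/(?P<vb>\d+)\. "
    r"Updated state: (?P<state>\w+)"
)

def tally_set_vbucket(path="ns_server.debug.log"):
    counts = Counter()
    with open(path, errors="replace") as f:
        for line in f:
            for m in SET_VBUCKET_RE.finditer(line):
                counts[(m.group("bucket"), m.group("state"))] += 1
    return counts

if __name__ == "__main__":
    for (bucket, state), n in sorted(tally_set_vbucket().items()):
        print(f"{bucket}: {n} vbucket(s) signalled {state}")

For the window shown here it would simply report the "default" bucket vbuckets being flipped to active on this node.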
[ns_server:debug,2014-08-19T16:51:56.729,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{299, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:56.760,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.763,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2761 us [ns_server:debug,2014-08-19T16:51:56.763,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.764,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.765,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{313, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:56.836,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.839,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.839,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3417 us [ns_server:debug,2014-08-19T16:51:56.840,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{304, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:56.841,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.900,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
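Each "config change: buckets ->" entry above carries a single-vbucket map delta of the form {map,[{VBucket, OldChain, NewChain}]}, where a chain lists the active node first followed by replicas and 'undefined' marks an empty replica slot. A minimal sketch for pulling those deltas out of a captured log (Python; the helper names and regex are illustrative and assume exactly the single-delta shape shown in this log, not the general bucket config format):

import re

# Extract single-vbucket map deltas from "config change: buckets ->" entries.
# Illustrative only: this is not ns_server code and not a general Erlang
# term parser.
MAP_DELTA_RE = re.compile(
    r"\{map,\[\{(?P<vb>\d+),\s*\[(?P<old>[^\]]*)\],\s*\[(?P<new>[^\]]*)\]\}\]\}"
)

def parse_chain(raw):
    # "'ns_1@10.242.238.88',undefined" -> ["ns_1@10.242.238.88", None]
    return [None if tok.strip() == "undefined" else tok.strip().strip("'")
            for tok in raw.split(",")]

def map_deltas(log_text):
    # Yields (vbucket, old_chain, new_chain) for every delta in the text.
    for m in MAP_DELTA_RE.finditer(log_text):
        yield (int(m.group("vb")),
               parse_chain(m.group("old")),
               parse_chain(m.group("new")))

Applied to the 299 entry above this yields (299, ['ns_1@10.242.238.88', None], ['ns_1@10.242.238.89', 'ns_1@10.242.238.88']): the chain for vbucket 299 goes from active on .88 with no replica to active on .89 with .88 as the replica.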
[ns_server:debug,2014-08-19T16:51:56.902,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.902,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1900 us [ns_server:debug,2014-08-19T16:51:56.903,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{301, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:56.903,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.936,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.937,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.937,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1481 us [ns_server:debug,2014-08-19T16:51:56.938,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.938,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{314, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:56.974,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:56.981,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:56.981,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7231 us [ns_server:debug,2014-08-19T16:51:56.982,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{317, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, 
{map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:56.983,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.015,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.016,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 838 us [ns_server:debug,2014-08-19T16:51:57.016,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.017,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.018,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{309, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.050,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.053,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.053,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3090 us [ns_server:debug,2014-08-19T16:51:57.054,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.054,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{306, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.090,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.091,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.091,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1631 us [ns_server:debug,2014-08-19T16:51:57.092,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:57.092,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{305, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.125,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.127,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.127,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1677 us [ns_server:debug,2014-08-19T16:51:57.127,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.128,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{315, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.184,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.186,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.186,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2780 us [ns_server:debug,2014-08-19T16:51:57.187,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.187,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{307, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.219,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
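The interleaved "Got full synchronization request" / "Fully synchronized config in N us" pairs give a rough picture of how quickly this node is serving the config pulls that 'ns_1@10.242.238.88' issues after each map update. A small sketch for summarizing those timings from a saved log (Python; the default path and function name are placeholders, not ns_server tooling):

import re
import statistics

# Collect "Fully synchronized config in <N> us" timings from a saved log.
SYNC_RE = re.compile(r"Fully synchronized config in (\d+) us")

def sync_latencies(path="ns_server.debug.log"):
    with open(path, errors="replace") as f:
        return [int(us) for line in f for us in SYNC_RE.findall(line)]

if __name__ == "__main__":
    lat = sync_latencies()
    if lat:
        print(f"{len(lat)} syncs: min {min(lat)} us, "
              f"median {statistics.median(lat)} us, max {max(lat)} us")

In the window shown here the reported times range from 23 us up to roughly 8 ms.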
[ns_server:debug,2014-08-19T16:51:57.226,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.226,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7327 us [ns_server:debug,2014-08-19T16:51:57.228,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.229,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{311, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.258,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.261,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.261,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3165 us [ns_server:debug,2014-08-19T16:51:57.262,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.262,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{310, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.300,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.300,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.300,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 23 us [ns_server:debug,2014-08-19T16:51:57.300,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.301,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{316, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.339,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.342,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.342,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3189 us [ns_server:debug,2014-08-19T16:51:57.343,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.343,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{308, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.373,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.377,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.377,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1768 us [ns_server:debug,2014-08-19T16:51:57.377,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.378,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{831, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.409,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.412,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.412,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3166 us [ns_server:debug,2014-08-19T16:51:57.413,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:57.413,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{810, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.451,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.458,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.458,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7048 us [ns_server:debug,2014-08-19T16:51:57.458,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.459,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{821, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.488,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.491,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.492,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3362 us [ns_server:debug,2014-08-19T16:51:57.492,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.493,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{830, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.528,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:51:57.532,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.532,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3182 us [ns_server:debug,2014-08-19T16:51:57.532,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.533,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{823, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.566,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.569,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.569,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3119 us [ns_server:debug,2014-08-19T16:51:57.570,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{826, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.570,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.604,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.608,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.608,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3601 us [ns_server:debug,2014-08-19T16:51:57.609,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.609,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{827, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.647,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.649,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.649,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1653 us [ns_server:debug,2014-08-19T16:51:57.649,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.650,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{814, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.682,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.685,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.685,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3668 us [ns_server:debug,2014-08-19T16:51:57.686,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.686,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{815, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.718,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.721,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3039 us [ns_server:debug,2014-08-19T16:51:57.721,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.721,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
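Aside from the changing {map,...} delta, every config change entry above repeats the same bucket properties for "default". For reference, a minimal model of those fields with the values they carry in this log (Python dataclass; purely illustrative, not an ns_server data structure; the masked sasl_password is omitted):

from dataclasses import dataclass, field
from typing import List

# Field names mirror the Erlang proplist keys shown in the log entries above.
@dataclass
class BucketConfig:
    name: str            # "default"
    uuid: str            # {uuid,<<"...">>}
    bucket_type: str     # {type,membase}
    auth_type: str       # {auth_type,sasl}
    num_replicas: int    # {num_replicas,1}
    replica_index: bool  # {replica_index,false}
    ram_quota: int       # bytes, {ram_quota,13369344000}
    num_threads: int     # {num_threads,3}
    num_vbuckets: int    # {num_vbuckets,1024}
    flush_enabled: bool  # {flush_enabled,true}
    servers: List[str] = field(default_factory=list)

default_bucket = BucketConfig(
    name="default",
    uuid="d95ae85dc319bab78fd23c50f6adae2e",
    bucket_type="membase",
    auth_type="sasl",
    num_replicas=1,
    replica_index=False,
    ram_quota=13369344000,
    num_threads=3,
    num_vbuckets=1024,
    flush_enabled=True,
    servers=["ns_1@10.242.238.88", "ns_1@10.242.238.89",
             "ns_1@10.242.238.90", "ns_1@10.242.238.91"],
)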
[ns_server:debug,2014-08-19T16:51:57.722,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{818, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.756,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.759,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.759,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3092 us [ns_server:debug,2014-08-19T16:51:57.760,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.760,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{822, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.798,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.801,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.802,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3034 us [ns_server:debug,2014-08-19T16:51:57.802,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.803,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{811, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.837,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.840,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully 
synchronized config in 2968 us [ns_server:debug,2014-08-19T16:51:57.840,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.841,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.841,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{819, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.878,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.879,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.879,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1426 us [ns_server:debug,2014-08-19T16:51:57.879,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.880,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{820, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.916,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.923,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.924,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7186 us [ns_server:debug,2014-08-19T16:51:57.924,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.925,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{828, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 
'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:57.958,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:57.961,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.961,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3413 us [ns_server:debug,2014-08-19T16:51:57.962,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:57.963,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{829, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:58.026,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:58.030,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:58.030,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3526 us [ns_server:debug,2014-08-19T16:51:58.030,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:58.031,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{812, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:58.065,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:58.068,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:58.069,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3395 us [ns_server:debug,2014-08-19T16:51:58.069,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:58.070,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{824, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:58.103,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:58.107,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1523 us [ns_server:debug,2014-08-19T16:51:58.107,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:58.107,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:58.108,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{817, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:58.139,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:58.142,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:58.142,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2755 us [ns_server:debug,2014-08-19T16:51:58.143,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:58.143,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{813, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:58.177,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:58.185,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:58.185,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized 
config in 7930 us [ns_server:debug,2014-08-19T16:51:58.185,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:58.186,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{825, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:58.214,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:51:58.218,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:58.219,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4637 us [ns_server:debug,2014-08-19T16:51:58.219,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{816, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:58.221,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:51:58.286,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 554 state to replica [ns_server:info,2014-08-19T16:51:58.290,ns_1@10.242.238.90:<0.30732.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 554 to state replica [ns_server:debug,2014-08-19T16:51:58.312,ns_1@10.242.238.90:<0.30732.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_554_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:58.314,ns_1@10.242.238.90:<0.30732.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[554]}, {checkpoints,[{554,0}]}, {name,<<"replication_building_554_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[554]}, {takeover,false}, {suffix,"building_554_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",554,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:58.314,ns_1@10.242.238.90:<0.30732.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30733.0> [rebalance:debug,2014-08-19T16:51:58.314,ns_1@10.242.238.90:<0.30732.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:58.315,ns_1@10.242.238.90:<0.30732.0>:ebucketmigrator_srv:handle_call:335]Suspended 
had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31758.1>,#Ref<16550.0.2.108875>}]} [rebalance:info,2014-08-19T16:51:58.315,ns_1@10.242.238.90:<0.30732.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 554 [rebalance:debug,2014-08-19T16:51:58.316,ns_1@10.242.238.90:<0.30732.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31758.1>,#Ref<16550.0.2.108875>}] [ns_server:debug,2014-08-19T16:51:58.316,ns_1@10.242.238.90:<0.30732.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:58.316,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30734.0> (ok) [rebalance:debug,2014-08-19T16:51:58.318,ns_1@10.242.238.90:<0.30735.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 554 [ns_server:info,2014-08-19T16:51:58.391,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 553 state to replica [ns_server:info,2014-08-19T16:51:58.396,ns_1@10.242.238.90:<0.30760.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 553 to state replica [ns_server:debug,2014-08-19T16:51:58.403,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 554. Nacking mccouch update. [views:debug,2014-08-19T16:51:58.403,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/554. Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:58.403,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",554,replica,0} [ns_server:debug,2014-08-19T16:51:58.404,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956,709, 645,581,398,1020,943,760,696,632,568,385,1007,994,747,683,619,555,372,981, 734,670,606,423,359,968,721,657,593,410,346,955,708,644,580,397,1019,942,759, 695,631,567,384,1006,993,746,682,618,554,371,980,733,669,605,422,358,967,720, 656,592,409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005,992, 745,681,617,370,979,732,668,604,421,357,966,719,655,591,408,344,953,706,642, 578,395,1017,940,757,693,629,565,382,1004,991,744,680,616,369,978,731,667, 603,420,356,965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628, 564,381,1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589,406, 342,951,704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614, 367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014, 754,690,626,562,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715, 651,587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740, 676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351, 960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985, 738,674,610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699, 635,571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400,1022, 945,762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021,944,761, 633,995,684,556,373,735,607,424,969,658,347] 
[ns_server:debug,2014-08-19T16:51:58.414,ns_1@10.242.238.90:<0.30760.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_553_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:58.416,ns_1@10.242.238.90:<0.30760.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[553]}, {checkpoints,[{553,0}]}, {name,<<"replication_building_553_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[553]}, {takeover,false}, {suffix,"building_553_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",553,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:58.417,ns_1@10.242.238.90:<0.30760.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30761.0> [rebalance:debug,2014-08-19T16:51:58.417,ns_1@10.242.238.90:<0.30760.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:58.417,ns_1@10.242.238.90:<0.30760.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31809.1>,#Ref<16550.0.2.109154>}]} [rebalance:info,2014-08-19T16:51:58.417,ns_1@10.242.238.90:<0.30760.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 553 [rebalance:debug,2014-08-19T16:51:58.418,ns_1@10.242.238.90:<0.30760.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31809.1>,#Ref<16550.0.2.109154>}] [ns_server:debug,2014-08-19T16:51:58.418,ns_1@10.242.238.90:<0.30760.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:58.418,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30762.0> (ok) [rebalance:debug,2014-08-19T16:51:58.420,ns_1@10.242.238.90:<0.30763.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 553 [views:debug,2014-08-19T16:51:58.437,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/554. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:51:58.437,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",554,replica,0} [ns_server:info,2014-08-19T16:51:58.489,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 552 state to replica [ns_server:info,2014-08-19T16:51:58.493,ns_1@10.242.238.90:<0.30768.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 552 to state replica [ns_server:debug,2014-08-19T16:51:58.512,ns_1@10.242.238.90:<0.30768.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_552_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:58.513,ns_1@10.242.238.90:<0.30768.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[552]}, {checkpoints,[{552,0}]}, {name,<<"replication_building_552_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[552]}, {takeover,false}, {suffix,"building_552_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",552,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:58.514,ns_1@10.242.238.90:<0.30768.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30783.0> [rebalance:debug,2014-08-19T16:51:58.514,ns_1@10.242.238.90:<0.30768.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:58.515,ns_1@10.242.238.90:<0.30768.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31863.1>,#Ref<16550.0.2.109450>}]} [rebalance:info,2014-08-19T16:51:58.515,ns_1@10.242.238.90:<0.30768.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 552 [rebalance:debug,2014-08-19T16:51:58.515,ns_1@10.242.238.90:<0.30768.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31863.1>,#Ref<16550.0.2.109450>}] [ns_server:debug,2014-08-19T16:51:58.516,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30784.0> (ok) [ns_server:debug,2014-08-19T16:51:58.516,ns_1@10.242.238.90:<0.30768.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:58.518,ns_1@10.242.238.90:<0.30785.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 552 [ns_server:debug,2014-08-19T16:51:58.579,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 553. Nacking mccouch update. [views:debug,2014-08-19T16:51:58.579,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/553. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:58.579,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",553,pending,0} [ns_server:debug,2014-08-19T16:51:58.580,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956,645, 943,760,696,632,568,385,1007,994,747,683,619,555,372,981,734,670,606,423,359, 968,721,657,593,410,346,955,708,644,580,397,1019,942,759,695,631,567,384, 1006,993,746,682,618,554,371,980,733,669,605,422,358,967,720,656,592,409,345, 954,707,643,579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553, 370,979,732,668,604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017, 940,757,693,629,565,382,1004,991,744,680,616,369,978,731,667,603,420,356,965, 718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003, 990,743,679,615,368,977,730,666,602,419,355,964,717,653,589,406,342,951,704, 640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,367,976,729, 665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690,626, 562,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,587,404, 949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,365, 974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390,1012,999, 752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351,960,713,649, 585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738,674,610, 363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,571,388, 1010,997,686,558,375,737,609,426,971,660,349,711,583,400,1022,945,762,634, 996,685,557,374,736,608,425,970,659,348,710,582,399,1021,944,761,633,995,684, 556,373,735,607,424,969,658,347,709,581,398,1020] [ns_server:info,2014-08-19T16:51:58.584,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 551 state to replica [ns_server:info,2014-08-19T16:51:58.588,ns_1@10.242.238.90:<0.30788.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 551 to state replica [ns_server:debug,2014-08-19T16:51:58.607,ns_1@10.242.238.90:<0.30788.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_551_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:58.608,ns_1@10.242.238.90:<0.30788.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[551]}, {checkpoints,[{551,0}]}, {name,<<"replication_building_551_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[551]}, {takeover,false}, {suffix,"building_551_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",551,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:58.609,ns_1@10.242.238.90:<0.30788.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30789.0> [rebalance:debug,2014-08-19T16:51:58.609,ns_1@10.242.238.90:<0.30788.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:58.610,ns_1@10.242.238.90:<0.30788.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31920.1>,#Ref<16550.0.2.109794>}]} 
[rebalance:info,2014-08-19T16:51:58.610,ns_1@10.242.238.90:<0.30788.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 551 [rebalance:debug,2014-08-19T16:51:58.610,ns_1@10.242.238.90:<0.30788.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31920.1>,#Ref<16550.0.2.109794>}] [ns_server:debug,2014-08-19T16:51:58.611,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30790.0> (ok) [ns_server:debug,2014-08-19T16:51:58.611,ns_1@10.242.238.90:<0.30788.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:58.612,ns_1@10.242.238.90:<0.30791.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 551 [views:debug,2014-08-19T16:51:58.646,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/553. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:58.646,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",553,pending,0} [rebalance:debug,2014-08-19T16:51:58.648,ns_1@10.242.238.90:<0.30791.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:51:58.648,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30791.0> (ok) [rebalance:debug,2014-08-19T16:51:58.651,ns_1@10.242.238.90:<0.30794.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 551 [ns_server:info,2014-08-19T16:51:58.680,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 550 state to replica [ns_server:info,2014-08-19T16:51:58.685,ns_1@10.242.238.90:<0.30797.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 550 to state replica [ns_server:debug,2014-08-19T16:51:58.703,ns_1@10.242.238.90:<0.30797.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_550_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:58.704,ns_1@10.242.238.90:<0.30797.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[550]}, {checkpoints,[{550,0}]}, {name,<<"replication_building_550_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[550]}, {takeover,false}, {suffix,"building_550_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",550,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:58.705,ns_1@10.242.238.90:<0.30797.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30798.0> [rebalance:debug,2014-08-19T16:51:58.705,ns_1@10.242.238.90:<0.30797.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:58.705,ns_1@10.242.238.90:<0.30797.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.31974.1>,#Ref<16550.0.2.110113>}]} [rebalance:info,2014-08-19T16:51:58.705,ns_1@10.242.238.90:<0.30797.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 550 [rebalance:debug,2014-08-19T16:51:58.706,ns_1@10.242.238.90:<0.30797.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.31974.1>,#Ref<16550.0.2.110113>}] 
[ns_server:debug,2014-08-19T16:51:58.707,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30799.0> (ok) [ns_server:debug,2014-08-19T16:51:58.707,ns_1@10.242.238.90:<0.30797.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:58.708,ns_1@10.242.238.90:<0.30800.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 550 [views:debug,2014-08-19T16:51:58.722,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/554. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:58.722,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",554,pending,0} [ns_server:info,2014-08-19T16:51:58.775,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 549 state to replica [ns_server:info,2014-08-19T16:51:58.779,ns_1@10.242.238.90:<0.30803.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 549 to state replica [ns_server:debug,2014-08-19T16:51:58.798,ns_1@10.242.238.90:<0.30803.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_549_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:58.799,ns_1@10.242.238.90:<0.30803.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[549]}, {checkpoints,[{549,0}]}, {name,<<"replication_building_549_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[549]}, {takeover,false}, {suffix,"building_549_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",549,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:58.800,ns_1@10.242.238.90:<0.30803.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30810.0> [rebalance:debug,2014-08-19T16:51:58.800,ns_1@10.242.238.90:<0.30803.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:58.801,ns_1@10.242.238.90:<0.30803.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32025.1>,#Ref<16550.0.2.110375>}]} [rebalance:info,2014-08-19T16:51:58.801,ns_1@10.242.238.90:<0.30803.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 549 [rebalance:debug,2014-08-19T16:51:58.801,ns_1@10.242.238.90:<0.30803.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32025.1>,#Ref<16550.0.2.110375>}] [ns_server:debug,2014-08-19T16:51:58.802,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30813.0> (ok) [ns_server:debug,2014-08-19T16:51:58.802,ns_1@10.242.238.90:<0.30803.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:58.803,ns_1@10.242.238.90:<0.30820.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 549 [ns_server:info,2014-08-19T16:51:58.871,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 548 state to replica [ns_server:debug,2014-08-19T16:51:58.872,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 552. Nacking mccouch update. 
[views:debug,2014-08-19T16:51:58.872,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/552. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:58.872,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",552,pending,0} [ns_server:debug,2014-08-19T16:51:58.873,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956,645, 943,760,696,632,568,385,1007,994,747,683,619,555,372,981,734,670,606,423,359, 968,721,657,593,410,346,955,708,644,580,397,1019,942,759,695,631,567,384, 1006,993,746,682,618,554,371,980,733,669,605,422,358,967,720,656,592,409,345, 954,707,643,579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553, 370,979,732,668,604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017, 940,757,693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603,420,356, 965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381, 1003,990,743,679,615,368,977,730,666,602,419,355,964,717,653,589,406,342,951, 704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,367,976, 729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690, 626,562,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651,587, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390,1012, 999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351,960,713, 649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738,674, 610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635,571, 388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400,1022,945,762, 634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021,944,761,633,995, 684,556,373,735,607,424,969,658,347,709,581,398,1020] [ns_server:info,2014-08-19T16:51:58.876,ns_1@10.242.238.90:<0.30823.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 548 to state replica [ns_server:debug,2014-08-19T16:51:58.894,ns_1@10.242.238.90:<0.30823.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_548_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:58.896,ns_1@10.242.238.90:<0.30823.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[548]}, {checkpoints,[{548,0}]}, {name,<<"replication_building_548_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[548]}, {takeover,false}, {suffix,"building_548_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",548,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:58.897,ns_1@10.242.238.90:<0.30823.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30824.0> [rebalance:debug,2014-08-19T16:51:58.897,ns_1@10.242.238.90:<0.30823.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:58.898,ns_1@10.242.238.90:<0.30823.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32076.1>,#Ref<16550.0.2.110662>}]} 
[rebalance:info,2014-08-19T16:51:58.898,ns_1@10.242.238.90:<0.30823.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 548 [rebalance:debug,2014-08-19T16:51:58.898,ns_1@10.242.238.90:<0.30823.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32076.1>,#Ref<16550.0.2.110662>}] [ns_server:debug,2014-08-19T16:51:58.899,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30825.0> (ok) [ns_server:debug,2014-08-19T16:51:58.899,ns_1@10.242.238.90:<0.30823.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:58.900,ns_1@10.242.238.90:<0.30826.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 548 [views:debug,2014-08-19T16:51:58.948,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/552. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:58.948,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",552,pending,0} [ns_server:info,2014-08-19T16:51:58.965,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 547 state to replica [ns_server:info,2014-08-19T16:51:58.970,ns_1@10.242.238.90:<0.30829.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 547 to state replica [ns_server:debug,2014-08-19T16:51:58.988,ns_1@10.242.238.90:<0.30829.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_547_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:58.989,ns_1@10.242.238.90:<0.30829.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[547]}, {checkpoints,[{547,0}]}, {name,<<"replication_building_547_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[547]}, {takeover,false}, {suffix,"building_547_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",547,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:58.990,ns_1@10.242.238.90:<0.30829.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30830.0> [rebalance:debug,2014-08-19T16:51:58.990,ns_1@10.242.238.90:<0.30829.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:58.991,ns_1@10.242.238.90:<0.30829.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32127.1>,#Ref<16550.0.2.110969>}]} [rebalance:info,2014-08-19T16:51:58.991,ns_1@10.242.238.90:<0.30829.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 547 [rebalance:debug,2014-08-19T16:51:58.991,ns_1@10.242.238.90:<0.30829.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32127.1>,#Ref<16550.0.2.110969>}] [ns_server:debug,2014-08-19T16:51:58.992,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30831.0> (ok) [ns_server:debug,2014-08-19T16:51:58.992,ns_1@10.242.238.90:<0.30829.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:58.993,ns_1@10.242.238.90:<0.30832.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 547 
[ns_server:info,2014-08-19T16:51:59.065,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 546 state to replica [ns_server:info,2014-08-19T16:51:59.069,ns_1@10.242.238.90:<0.30849.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 546 to state replica [ns_server:debug,2014-08-19T16:51:59.090,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 551. Nacking mccouch update. [views:debug,2014-08-19T16:51:59.091,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/551. Updated state: pending (1) [ns_server:debug,2014-08-19T16:51:59.091,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",551,pending,1} [ns_server:debug,2014-08-19T16:51:59.092,ns_1@10.242.238.90:<0.30849.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_546_'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:51:59.092,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956,645, 943,760,696,632,568,385,1007,994,747,683,619,555,372,981,734,670,606,423,359, 968,721,657,593,410,346,955,708,644,580,397,1019,942,759,695,631,567,384, 1006,993,746,682,618,554,371,980,733,669,605,422,358,967,720,656,592,409,345, 954,707,643,579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553, 370,979,732,668,604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017, 940,757,693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603,420,356, 965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,368,977,730,666,602,419,355,964,717,653,589,406,342, 951,704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,367, 976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754, 690,626,562,379,1001,988,741,677,613,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676, 612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351,960, 713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738, 674,610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635, 571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400,1022,945, 762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021,944,761,633, 995,684,556,373,735,607,424,969,658,347,709,581,398,1020] [rebalance:info,2014-08-19T16:51:59.093,ns_1@10.242.238.90:<0.30849.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[546]}, {checkpoints,[{546,0}]}, {name,<<"replication_building_546_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[546]}, {takeover,false}, {suffix,"building_546_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",546,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:59.094,ns_1@10.242.238.90:<0.30849.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30850.0> 
[rebalance:debug,2014-08-19T16:51:59.094,ns_1@10.242.238.90:<0.30849.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:59.095,ns_1@10.242.238.90:<0.30849.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32178.1>,#Ref<16550.0.2.111254>}]} [rebalance:info,2014-08-19T16:51:59.095,ns_1@10.242.238.90:<0.30849.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 546 [rebalance:debug,2014-08-19T16:51:59.095,ns_1@10.242.238.90:<0.30849.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32178.1>,#Ref<16550.0.2.111254>}] [ns_server:debug,2014-08-19T16:51:59.096,ns_1@10.242.238.90:<0.30849.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:59.096,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30851.0> (ok) [rebalance:debug,2014-08-19T16:51:59.097,ns_1@10.242.238.90:<0.30852.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 546 [views:debug,2014-08-19T16:51:59.166,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/551. Updated state: pending (1) [ns_server:debug,2014-08-19T16:51:59.166,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",551,pending,1} [ns_server:info,2014-08-19T16:51:59.168,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 545 state to replica [ns_server:info,2014-08-19T16:51:59.173,ns_1@10.242.238.90:<0.30855.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 545 to state replica [ns_server:debug,2014-08-19T16:51:59.191,ns_1@10.242.238.90:<0.30855.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_545_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:59.192,ns_1@10.242.238.90:<0.30855.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[545]}, {checkpoints,[{545,0}]}, {name,<<"replication_building_545_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[545]}, {takeover,false}, {suffix,"building_545_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",545,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:59.193,ns_1@10.242.238.90:<0.30855.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30856.0> [rebalance:debug,2014-08-19T16:51:59.193,ns_1@10.242.238.90:<0.30855.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:59.194,ns_1@10.242.238.90:<0.30855.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32234.1>,#Ref<16550.0.2.111515>}]} [rebalance:info,2014-08-19T16:51:59.194,ns_1@10.242.238.90:<0.30855.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 545 [rebalance:debug,2014-08-19T16:51:59.194,ns_1@10.242.238.90:<0.30855.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32234.1>,#Ref<16550.0.2.111515>}] [ns_server:debug,2014-08-19T16:51:59.195,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: 
<0.30857.0> (ok) [ns_server:debug,2014-08-19T16:51:59.195,ns_1@10.242.238.90:<0.30855.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:59.196,ns_1@10.242.238.90:<0.30858.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 545 [ns_server:info,2014-08-19T16:51:59.267,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 544 state to replica [ns_server:info,2014-08-19T16:51:59.271,ns_1@10.242.238.90:<0.30875.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 544 to state replica [ns_server:debug,2014-08-19T16:51:59.289,ns_1@10.242.238.90:<0.30875.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_544_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:59.290,ns_1@10.242.238.90:<0.30875.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[544]}, {checkpoints,[{544,0}]}, {name,<<"replication_building_544_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[544]}, {takeover,false}, {suffix,"building_544_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",544,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:59.291,ns_1@10.242.238.90:<0.30875.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30876.0> [rebalance:debug,2014-08-19T16:51:59.291,ns_1@10.242.238.90:<0.30875.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:59.291,ns_1@10.242.238.90:<0.30875.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32285.1>,#Ref<16550.0.2.111776>}]} [rebalance:info,2014-08-19T16:51:59.292,ns_1@10.242.238.90:<0.30875.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 544 [rebalance:debug,2014-08-19T16:51:59.292,ns_1@10.242.238.90:<0.30875.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32285.1>,#Ref<16550.0.2.111776>}] [ns_server:debug,2014-08-19T16:51:59.292,ns_1@10.242.238.90:<0.30875.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:59.293,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30877.0> (ok) [rebalance:debug,2014-08-19T16:51:59.294,ns_1@10.242.238.90:<0.30878.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 544 [ns_server:debug,2014-08-19T16:51:59.308,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 549. Nacking mccouch update. [views:debug,2014-08-19T16:51:59.308,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/549. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:59.308,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",549,pending,0} [ns_server:debug,2014-08-19T16:51:59.309,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956,645, 943,760,696,632,568,385,1007,994,747,683,619,555,372,981,734,670,606,423,359, 968,721,657,593,410,346,955,708,644,580,397,1019,942,759,695,631,567,384, 1006,993,746,682,618,554,371,980,733,669,605,422,358,967,720,656,592,409,345, 954,707,643,579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553, 370,979,732,668,604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017, 940,757,693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603,420,356, 965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,368,977,730,666,602,419,355,964,717,653,589,406,342, 951,704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,367, 976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754, 690,626,562,379,1001,988,741,677,613,549,366,975,728,664,600,417,353,962,715, 651,587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740, 676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351, 960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985, 738,674,610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699, 635,571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400,1022, 945,762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021,944,761, 633,995,684,556,373,735,607,424,969,658,347,709,581,398,1020] [ns_server:info,2014-08-19T16:51:59.371,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 543 state to replica [ns_server:info,2014-08-19T16:51:59.374,ns_1@10.242.238.90:<0.30887.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 543 to state replica [views:debug,2014-08-19T16:51:59.375,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/549. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:59.375,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",549,pending,0} [ns_server:debug,2014-08-19T16:51:59.393,ns_1@10.242.238.90:<0.30887.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_543_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:59.394,ns_1@10.242.238.90:<0.30887.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[543]}, {checkpoints,[{543,0}]}, {name,<<"replication_building_543_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[543]}, {takeover,false}, {suffix,"building_543_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",543,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:59.395,ns_1@10.242.238.90:<0.30887.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30888.0> [rebalance:debug,2014-08-19T16:51:59.395,ns_1@10.242.238.90:<0.30887.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:59.396,ns_1@10.242.238.90:<0.30887.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32337.1>,#Ref<16550.0.2.112099>}]} [rebalance:info,2014-08-19T16:51:59.396,ns_1@10.242.238.90:<0.30887.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 543 [rebalance:debug,2014-08-19T16:51:59.396,ns_1@10.242.238.90:<0.30887.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32337.1>,#Ref<16550.0.2.112099>}] [ns_server:debug,2014-08-19T16:51:59.397,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30889.0> (ok) [ns_server:debug,2014-08-19T16:51:59.397,ns_1@10.242.238.90:<0.30887.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:59.398,ns_1@10.242.238.90:<0.30890.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 543 [ns_server:info,2014-08-19T16:51:59.464,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 542 state to replica [ns_server:info,2014-08-19T16:51:59.468,ns_1@10.242.238.90:<0.30907.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 542 to state replica [ns_server:debug,2014-08-19T16:51:59.486,ns_1@10.242.238.90:<0.30907.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_542_'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:51:59.487,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 550. Nacking mccouch update. [views:debug,2014-08-19T16:51:59.487,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/550. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:59.487,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",550,pending,0} [rebalance:info,2014-08-19T16:51:59.488,ns_1@10.242.238.90:<0.30907.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[542]}, {checkpoints,[{542,0}]}, {name,<<"replication_building_542_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[542]}, {takeover,false}, {suffix,"building_542_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",542,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [ns_server:debug,2014-08-19T16:51:59.488,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956,645, 943,760,696,632,568,385,1007,994,747,683,619,555,372,981,734,670,606,423,359, 968,721,657,593,410,346,955,708,644,580,397,1019,942,759,695,631,567,384, 1006,993,746,682,618,554,371,980,733,669,605,422,358,967,720,656,592,409,345, 954,707,643,579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553, 370,979,732,668,604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017, 940,757,693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603,420,356, 965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,368,977,730,666,602,419,355,964,717,653,589,406,342, 951,704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550, 367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014, 754,690,626,562,379,1001,988,741,677,613,549,366,975,728,664,600,417,353,962, 715,651,587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987, 740,676,612,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637, 573,390,1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415, 351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376, 985,738,674,610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763, 699,635,571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400, 1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021, 944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398,1020] [rebalance:debug,2014-08-19T16:51:59.489,ns_1@10.242.238.90:<0.30907.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30908.0> [rebalance:debug,2014-08-19T16:51:59.489,ns_1@10.242.238.90:<0.30907.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:59.489,ns_1@10.242.238.90:<0.30907.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32388.1>,#Ref<16550.0.2.112402>}]} [rebalance:info,2014-08-19T16:51:59.490,ns_1@10.242.238.90:<0.30907.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 542 [rebalance:debug,2014-08-19T16:51:59.490,ns_1@10.242.238.90:<0.30907.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32388.1>,#Ref<16550.0.2.112402>}] [ns_server:debug,2014-08-19T16:51:59.490,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30909.0> (ok) 
[ns_server:debug,2014-08-19T16:51:59.490,ns_1@10.242.238.90:<0.30907.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:59.492,ns_1@10.242.238.90:<0.30910.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 542 [views:debug,2014-08-19T16:51:59.521,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/550. Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:59.521,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",550,pending,0} [ns_server:info,2014-08-19T16:51:59.563,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 541 state to replica [ns_server:info,2014-08-19T16:51:59.567,ns_1@10.242.238.90:<0.30913.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 541 to state replica [ns_server:debug,2014-08-19T16:51:59.585,ns_1@10.242.238.90:<0.30913.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_541_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:59.586,ns_1@10.242.238.90:<0.30913.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[541]}, {checkpoints,[{541,0}]}, {name,<<"replication_building_541_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[541]}, {takeover,false}, {suffix,"building_541_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",541,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:59.587,ns_1@10.242.238.90:<0.30913.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30928.0> [rebalance:debug,2014-08-19T16:51:59.587,ns_1@10.242.238.90:<0.30913.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:59.587,ns_1@10.242.238.90:<0.30913.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32439.1>,#Ref<16550.0.2.112689>}]} [rebalance:info,2014-08-19T16:51:59.587,ns_1@10.242.238.90:<0.30913.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 541 [rebalance:debug,2014-08-19T16:51:59.588,ns_1@10.242.238.90:<0.30913.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32439.1>,#Ref<16550.0.2.112689>}] [ns_server:debug,2014-08-19T16:51:59.588,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30929.0> (ok) [ns_server:debug,2014-08-19T16:51:59.589,ns_1@10.242.238.90:<0.30913.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:59.590,ns_1@10.242.238.90:<0.30930.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 541 [ns_server:debug,2014-08-19T16:51:59.613,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 548. Nacking mccouch update. [views:debug,2014-08-19T16:51:59.613,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/548. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:59.613,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",548,pending,0} [ns_server:debug,2014-08-19T16:51:59.614,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956,645, 696,568,385,1007,994,747,683,619,555,372,981,734,670,606,423,359,968,721,657, 593,410,346,955,708,644,580,397,1019,942,759,695,631,567,384,1006,993,746, 682,618,554,371,980,733,669,605,422,358,967,720,656,592,409,345,954,707,643, 579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732, 668,604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940,757,693, 629,565,382,1004,991,744,680,616,552,369,978,731,667,603,420,356,965,718,654, 590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743, 679,615,551,368,977,730,666,602,419,355,964,717,653,589,406,342,951,704,640, 576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976,729, 665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690,626, 562,379,1001,988,741,677,613,549,366,975,728,664,600,417,353,962,715,651,587, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 548,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351,960, 713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738, 674,610,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699,635, 571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400,1022,945, 762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021,944,761,633, 995,684,556,373,735,607,424,969,658,347,709,581,398,1020,943,760,632] [views:debug,2014-08-19T16:51:59.655,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/548. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:59.655,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",548,pending,0} [ns_server:info,2014-08-19T16:51:59.659,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 540 state to replica [ns_server:info,2014-08-19T16:51:59.663,ns_1@10.242.238.90:<0.30933.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 540 to state replica [ns_server:debug,2014-08-19T16:51:59.682,ns_1@10.242.238.90:<0.30933.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_540_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:59.683,ns_1@10.242.238.90:<0.30933.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[540]}, {checkpoints,[{540,0}]}, {name,<<"replication_building_540_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[540]}, {takeover,false}, {suffix,"building_540_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",540,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:59.684,ns_1@10.242.238.90:<0.30933.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30934.0> [rebalance:debug,2014-08-19T16:51:59.684,ns_1@10.242.238.90:<0.30933.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:59.684,ns_1@10.242.238.90:<0.30933.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32490.1>,#Ref<16550.0.2.112954>}]} [rebalance:info,2014-08-19T16:51:59.684,ns_1@10.242.238.90:<0.30933.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 540 [rebalance:debug,2014-08-19T16:51:59.685,ns_1@10.242.238.90:<0.30933.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32490.1>,#Ref<16550.0.2.112954>}] [ns_server:debug,2014-08-19T16:51:59.685,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30935.0> (ok) [ns_server:debug,2014-08-19T16:51:59.686,ns_1@10.242.238.90:<0.30933.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:59.686,ns_1@10.242.238.90:<0.30936.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 540 [ns_server:debug,2014-08-19T16:51:59.739,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 546. Nacking mccouch update. [views:debug,2014-08-19T16:51:59.739,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/546. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:59.739,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",546,pending,0} [ns_server:debug,2014-08-19T16:51:59.740,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,361, 723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956,645, 696,568,385,1007,994,747,683,619,555,372,981,734,670,606,423,359,968,721,657, 593,410,346,955,708,644,580,397,1019,942,759,695,631,567,384,1006,993,746, 682,618,554,371,980,733,669,605,422,358,967,720,656,592,409,345,954,707,643, 579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732, 668,604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940,757,693, 629,565,382,1004,991,744,680,616,552,369,978,731,667,603,420,356,965,718,654, 590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743, 679,615,551,368,977,730,666,602,419,355,964,717,653,589,406,342,951,704,640, 576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976,729, 665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690,626, 562,379,1001,988,741,677,613,549,366,975,728,664,600,417,353,962,715,651,587, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 548,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351,960, 713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738, 674,610,546,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699, 635,571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400,1022, 945,762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021,944,761, 633,995,684,556,373,735,607,424,969,658,347,709,581,398,1020,943,760,632] [ns_server:info,2014-08-19T16:51:59.755,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 539 state to replica [ns_server:info,2014-08-19T16:51:59.759,ns_1@10.242.238.90:<0.30953.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 539 to state replica [views:debug,2014-08-19T16:51:59.775,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/546. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:59.775,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",546,pending,0} [ns_server:debug,2014-08-19T16:51:59.780,ns_1@10.242.238.90:<0.30953.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_539_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:59.781,ns_1@10.242.238.90:<0.30953.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[539]}, {checkpoints,[{539,0}]}, {name,<<"replication_building_539_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[539]}, {takeover,false}, {suffix,"building_539_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",539,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:59.781,ns_1@10.242.238.90:<0.30953.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30954.0> [rebalance:debug,2014-08-19T16:51:59.781,ns_1@10.242.238.90:<0.30953.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:59.782,ns_1@10.242.238.90:<0.30953.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32541.1>,#Ref<16550.0.2.113241>}]} [rebalance:info,2014-08-19T16:51:59.782,ns_1@10.242.238.90:<0.30953.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 539 [rebalance:debug,2014-08-19T16:51:59.782,ns_1@10.242.238.90:<0.30953.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32541.1>,#Ref<16550.0.2.113241>}] [ns_server:debug,2014-08-19T16:51:59.783,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30955.0> (ok) [ns_server:debug,2014-08-19T16:51:59.783,ns_1@10.242.238.90:<0.30953.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:51:59.784,ns_1@10.242.238.90:<0.30956.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 539 [ns_server:debug,2014-08-19T16:51:59.840,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 544. Nacking mccouch update. [views:debug,2014-08-19T16:51:59.840,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/544. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:59.840,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",544,pending,0} [ns_server:debug,2014-08-19T16:51:59.841,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,544, 361,723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956, 645,696,568,385,1007,994,747,683,619,555,372,981,734,670,606,423,359,968,721, 657,593,410,346,955,708,644,580,397,1019,942,759,695,631,567,384,1006,993, 746,682,618,554,371,980,733,669,605,422,358,967,720,656,592,409,345,954,707, 643,579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979, 732,668,604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940,757, 693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603,420,356,965,718, 654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003,990, 743,679,615,551,368,977,730,666,602,419,355,964,717,653,589,406,342,951,704, 640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976, 729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690, 626,562,379,1001,988,741,677,613,549,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676, 612,548,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,364,973,726,662,598,415,351, 960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985, 738,674,610,546,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763, 699,635,571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400, 1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021, 944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398,1020,943,760, 632] [ns_server:info,2014-08-19T16:51:59.851,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 538 state to replica [ns_server:info,2014-08-19T16:51:59.855,ns_1@10.242.238.90:<0.30973.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 538 to state replica [ns_server:debug,2014-08-19T16:51:59.873,ns_1@10.242.238.90:<0.30973.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_538_'ns_1@10.242.238.90' [views:debug,2014-08-19T16:51:59.873,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/544. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:51:59.874,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",544,pending,0} [rebalance:info,2014-08-19T16:51:59.875,ns_1@10.242.238.90:<0.30973.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[538]}, {checkpoints,[{538,0}]}, {name,<<"replication_building_538_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[538]}, {takeover,false}, {suffix,"building_538_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",538,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:59.876,ns_1@10.242.238.90:<0.30973.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30974.0> [rebalance:debug,2014-08-19T16:51:59.876,ns_1@10.242.238.90:<0.30973.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:59.877,ns_1@10.242.238.90:<0.30973.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32592.1>,#Ref<16550.0.2.113504>}]} [rebalance:info,2014-08-19T16:51:59.877,ns_1@10.242.238.90:<0.30973.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 538 [rebalance:debug,2014-08-19T16:51:59.877,ns_1@10.242.238.90:<0.30973.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32592.1>,#Ref<16550.0.2.113504>}] [ns_server:debug,2014-08-19T16:51:59.878,ns_1@10.242.238.90:<0.30973.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:59.878,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30975.0> (ok) [rebalance:debug,2014-08-19T16:51:59.879,ns_1@10.242.238.90:<0.30976.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 538 [ns_server:info,2014-08-19T16:51:59.946,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 537 state to replica [ns_server:debug,2014-08-19T16:51:59.950,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 547. Nacking mccouch update. [views:debug,2014-08-19T16:51:59.950,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/547. 
Updated state: pending (0) [ns_server:info,2014-08-19T16:51:59.950,ns_1@10.242.238.90:<0.30993.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 537 to state replica [ns_server:debug,2014-08-19T16:51:59.951,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",547,pending,0} [ns_server:debug,2014-08-19T16:51:59.951,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,362,724,596,413,958,647,698,570,387,1009,749,621,983,672,544, 361,723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411,956, 645,696,568,385,1007,994,747,683,619,555,372,981,734,670,606,423,359,968,721, 657,593,410,346,955,708,644,580,397,1019,942,759,695,631,567,384,1006,993, 746,682,618,554,371,980,733,669,605,422,358,967,720,656,592,409,345,954,707, 643,579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979, 732,668,604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940,757, 693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603,420,356,965,718, 654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003,990, 743,679,615,551,368,977,730,666,602,419,355,964,717,653,589,406,342,951,704, 640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976, 729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690, 626,562,379,1001,988,741,677,613,549,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676, 612,548,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,415, 351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376, 985,738,674,610,546,363,972,725,661,597,414,350,959,712,648,584,401,1023,946, 763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400, 1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021, 944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398,1020,943,760, 632] [ns_server:debug,2014-08-19T16:51:59.969,ns_1@10.242.238.90:<0.30993.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_537_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:59.970,ns_1@10.242.238.90:<0.30993.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[537]}, {checkpoints,[{537,0}]}, {name,<<"replication_building_537_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[537]}, {takeover,false}, {suffix,"building_537_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",537,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:51:59.971,ns_1@10.242.238.90:<0.30993.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.30994.0> [rebalance:debug,2014-08-19T16:51:59.971,ns_1@10.242.238.90:<0.30993.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:51:59.972,ns_1@10.242.238.90:<0.30993.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32643.1>,#Ref<16550.0.2.113766>}]} [rebalance:info,2014-08-19T16:51:59.972,ns_1@10.242.238.90:<0.30993.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 537 
[rebalance:debug,2014-08-19T16:51:59.972,ns_1@10.242.238.90:<0.30993.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32643.1>,#Ref<16550.0.2.113766>}] [ns_server:debug,2014-08-19T16:51:59.973,ns_1@10.242.238.90:<0.30993.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:51:59.973,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30995.0> (ok) [rebalance:debug,2014-08-19T16:51:59.974,ns_1@10.242.238.90:<0.30996.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 537 [views:debug,2014-08-19T16:52:00.017,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/547. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:00.018,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",547,pending,0} [ns_server:info,2014-08-19T16:52:00.041,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 536 state to replica [ns_server:info,2014-08-19T16:52:00.046,ns_1@10.242.238.90:<0.30999.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 536 to state replica [ns_server:debug,2014-08-19T16:52:00.064,ns_1@10.242.238.90:<0.30999.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_536_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:00.065,ns_1@10.242.238.90:<0.30999.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[536]}, {checkpoints,[{536,0}]}, {name,<<"replication_building_536_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[536]}, {takeover,false}, {suffix,"building_536_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",536,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:00.066,ns_1@10.242.238.90:<0.30999.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31000.0> [rebalance:debug,2014-08-19T16:52:00.066,ns_1@10.242.238.90:<0.30999.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:00.067,ns_1@10.242.238.90:<0.30999.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32694.1>,#Ref<16550.0.2.114051>}]} [rebalance:info,2014-08-19T16:52:00.067,ns_1@10.242.238.90:<0.30999.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 536 [rebalance:debug,2014-08-19T16:52:00.067,ns_1@10.242.238.90:<0.30999.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32694.1>,#Ref<16550.0.2.114051>}] [ns_server:debug,2014-08-19T16:52:00.068,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31001.0> (ok) [ns_server:debug,2014-08-19T16:52:00.068,ns_1@10.242.238.90:<0.30999.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:00.069,ns_1@10.242.238.90:<0.31002.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 536 [ns_server:info,2014-08-19T16:52:00.136,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 535 state to replica 
[ns_server:info,2014-08-19T16:52:00.140,ns_1@10.242.238.90:<0.31019.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 535 to state replica [ns_server:debug,2014-08-19T16:52:00.159,ns_1@10.242.238.90:<0.31019.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_535_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:00.160,ns_1@10.242.238.90:<0.31019.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[535]}, {checkpoints,[{535,0}]}, {name,<<"replication_building_535_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[535]}, {takeover,false}, {suffix,"building_535_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",535,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:00.161,ns_1@10.242.238.90:<0.31019.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31020.0> [rebalance:debug,2014-08-19T16:52:00.161,ns_1@10.242.238.90:<0.31019.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:00.161,ns_1@10.242.238.90:<0.31019.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.32745.1>,#Ref<16550.0.2.114343>}]} [rebalance:info,2014-08-19T16:52:00.162,ns_1@10.242.238.90:<0.31019.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 535 [rebalance:debug,2014-08-19T16:52:00.162,ns_1@10.242.238.90:<0.31019.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.32745.1>,#Ref<16550.0.2.114343>}] [ns_server:debug,2014-08-19T16:52:00.162,ns_1@10.242.238.90:<0.31019.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:52:00.163,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31021.0> (ok) [rebalance:debug,2014-08-19T16:52:00.164,ns_1@10.242.238.90:<0.31022.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 535 [ns_server:debug,2014-08-19T16:52:00.168,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 545. Nacking mccouch update. [views:debug,2014-08-19T16:52:00.168,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/545. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:00.168,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",545,pending,0} [ns_server:debug,2014-08-19T16:52:00.169,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,360,722,594,411, 956,645,696,568,385,1007,994,747,683,619,555,372,981,734,670,606,423,359,968, 721,657,593,410,346,955,708,644,580,397,1019,942,759,695,631,567,384,1006, 993,746,682,618,554,371,980,733,669,605,422,358,967,720,656,592,409,345,954, 707,643,579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370, 979,732,668,604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940, 757,693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603,420,356,965, 718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,368,977,730,666,602,419,355,964,717,653,589,406,342,951, 704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367, 976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754, 690,626,562,379,1001,988,741,677,613,549,366,975,728,664,600,417,353,962,715, 651,587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740, 676,612,548,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637, 573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598, 415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,414,350,959,712,648,584,401,1023, 946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583, 400,1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710,582,399, 1021,944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398,1020, 943,760,632] [ns_server:info,2014-08-19T16:52:00.232,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 534 state to replica [views:debug,2014-08-19T16:52:00.235,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/545. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:00.235,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",545,pending,0} [ns_server:info,2014-08-19T16:52:00.236,ns_1@10.242.238.90:<0.31025.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 534 to state replica [ns_server:debug,2014-08-19T16:52:00.254,ns_1@10.242.238.90:<0.31025.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_534_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:00.255,ns_1@10.242.238.90:<0.31025.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[534]}, {checkpoints,[{534,0}]}, {name,<<"replication_building_534_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[534]}, {takeover,false}, {suffix,"building_534_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",534,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:00.256,ns_1@10.242.238.90:<0.31025.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31026.0> [rebalance:debug,2014-08-19T16:52:00.256,ns_1@10.242.238.90:<0.31025.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:00.257,ns_1@10.242.238.90:<0.31025.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.28.2>,#Ref<16550.0.2.114610>}]} [rebalance:info,2014-08-19T16:52:00.257,ns_1@10.242.238.90:<0.31025.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 534 [rebalance:debug,2014-08-19T16:52:00.257,ns_1@10.242.238.90:<0.31025.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.28.2>,#Ref<16550.0.2.114610>}] [ns_server:debug,2014-08-19T16:52:00.258,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31027.0> (ok) [ns_server:debug,2014-08-19T16:52:00.258,ns_1@10.242.238.90:<0.31025.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:00.259,ns_1@10.242.238.90:<0.31028.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 534 [ns_server:debug,2014-08-19T16:52:00.385,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 543. Nacking mccouch update. [views:debug,2014-08-19T16:52:00.385,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/543. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:00.385,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",543,pending,0} [ns_server:debug,2014-08-19T16:52:00.386,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,734,670,606,423,359,968,721,657,593, 410,346,955,708,644,580,397,1019,942,759,695,631,567,384,1006,993,746,682, 618,554,371,980,733,669,605,422,358,967,720,656,592,409,345,954,707,643,579, 396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732,668, 604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,420,356,965,718,654,590, 407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743,679, 615,551,368,977,730,666,602,419,355,964,717,653,589,406,342,951,704,640,576, 393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665, 601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690,626,562, 379,1001,988,741,677,613,549,366,975,728,664,600,417,353,962,715,651,587,404, 949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548, 365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390,1012, 999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,415,351,960, 713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738, 674,610,546,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763,699, 635,571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400,1022, 945,762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021,944,761, 633,995,684,556,373,735,607,424,969,658,347,709,581,398,1020,943,760,632,994, 683,555,372] [views:debug,2014-08-19T16:52:00.453,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/543. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:00.453,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",543,pending,0} [ns_server:debug,2014-08-19T16:52:00.594,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 541. Nacking mccouch update. [views:debug,2014-08-19T16:52:00.595,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/541. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:00.595,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",541,pending,0} [ns_server:debug,2014-08-19T16:52:00.595,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,734,670,606,423,359,968,721,657,593, 410,346,955,708,644,580,397,1019,942,759,695,631,567,384,1006,993,746,682, 618,554,371,980,733,669,605,541,422,358,967,720,656,592,409,345,954,707,643, 579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732, 668,604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940,757,693, 629,565,382,1004,991,744,680,616,552,369,978,731,667,603,420,356,965,718,654, 590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743, 679,615,551,368,977,730,666,602,419,355,964,717,653,589,406,342,951,704,640, 576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976,729, 665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690,626, 562,379,1001,988,741,677,613,549,366,975,728,664,600,417,353,962,715,651,587, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 548,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,415,351, 960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985, 738,674,610,546,363,972,725,661,597,414,350,959,712,648,584,401,1023,946,763, 699,635,571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400, 1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021, 944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398,1020,943,760, 632,994,683,555,372] [views:debug,2014-08-19T16:52:00.662,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/541. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:00.662,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",541,pending,0} [ns_server:debug,2014-08-19T16:52:00.804,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 539. Nacking mccouch update. [views:debug,2014-08-19T16:52:00.804,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/539. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:00.804,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",539,pending,0} [ns_server:debug,2014-08-19T16:52:00.805,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,734,670,606,423,359,968,721,657,593, 410,346,955,708,644,580,397,1019,942,759,695,631,567,384,1006,993,746,682, 618,554,371,980,733,669,605,541,422,358,967,720,656,592,409,345,954,707,643, 579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732, 668,604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940,757,693, 629,565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718, 654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003,990, 743,679,615,551,368,977,730,666,602,419,355,964,717,653,589,406,342,951,704, 640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976, 729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690, 626,562,379,1001,988,741,677,613,549,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676, 612,548,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,415, 351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376, 985,738,674,610,546,363,972,725,661,597,414,350,959,712,648,584,401,1023,946, 763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400, 1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021, 944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398,1020,943,760, 632,994,683,555,372] [views:debug,2014-08-19T16:52:00.866,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/539. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:00.866,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",539,pending,0} [rebalance:debug,2014-08-19T16:52:00.867,ns_1@10.242.238.90:<0.30763.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:00.867,ns_1@10.242.238.90:<0.30735.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:00.867,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30735.0> (ok) [ns_server:debug,2014-08-19T16:52:00.867,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30763.0> (ok) [rebalance:debug,2014-08-19T16:52:00.871,ns_1@10.242.238.90:<0.31074.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 553 [rebalance:debug,2014-08-19T16:52:00.872,ns_1@10.242.238.90:<0.31077.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 554 [ns_server:debug,2014-08-19T16:52:00.984,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 542. Nacking mccouch update. 
[views:debug,2014-08-19T16:52:00.984,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/542. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:00.985,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",542,pending,0} [ns_server:debug,2014-08-19T16:52:00.985,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,734,670,606,542,423,359,968,721,657, 593,410,346,955,708,644,580,397,1019,942,759,695,631,567,384,1006,993,746, 682,618,554,371,980,733,669,605,541,422,358,967,720,656,592,409,345,954,707, 643,579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979, 732,668,604,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940,757, 693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965, 718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,368,977,730,666,602,419,355,964,717,653,589,406,342,951, 704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367, 976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754, 690,626,562,379,1001,988,741,677,613,549,366,975,728,664,600,417,353,962,715, 651,587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740, 676,612,548,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637, 573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598, 415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,414,350,959,712,648,584,401,1023, 946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583, 400,1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710,582,399, 1021,944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398,1020, 943,760,632,994,683,555,372] [views:debug,2014-08-19T16:52:01.018,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/542. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:01.019,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",542,pending,0} [ns_server:debug,2014-08-19T16:52:01.085,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 540. Nacking mccouch update. [views:debug,2014-08-19T16:52:01.085,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/540. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:01.085,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",540,pending,0} [ns_server:debug,2014-08-19T16:52:01.086,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,734,670,606,542,423,359,968,721,657, 593,410,346,955,708,644,580,397,1019,942,759,695,631,567,384,1006,993,746, 682,618,554,371,980,733,669,605,541,422,358,967,720,656,592,409,345,954,707, 643,579,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979, 732,668,604,540,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940, 757,693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356, 965,718,654,590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,368,977,730,666,602,419,355,964,717,653,589,406,342, 951,704,640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550, 367,976,729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014, 754,690,626,562,379,1001,988,741,677,613,549,366,975,728,664,600,417,353,962, 715,651,587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987, 740,676,612,548,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701, 637,573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662, 598,415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623, 559,376,985,738,674,610,546,363,972,725,661,597,414,350,959,712,648,584,401, 1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,349, 711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710,582, 399,1021,944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398, 1020,943,760,632,994,683,555,372] [views:debug,2014-08-19T16:52:01.119,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/540. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:01.119,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",540,pending,0} [ns_server:debug,2014-08-19T16:52:01.186,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 538. Nacking mccouch update. [views:debug,2014-08-19T16:52:01.186,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/538. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:01.186,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",538,pending,0} [ns_server:debug,2014-08-19T16:52:01.187,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,968,721,657,593,410,346, 955,708,644,580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,554, 371,980,733,669,605,541,422,358,967,720,656,592,409,345,954,707,643,579,396, 1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604, 540,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654, 590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743, 679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,406,342,951,704, 640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976, 729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690, 626,562,379,1001,988,741,677,613,549,366,975,728,664,600,417,353,962,715,651, 587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676, 612,548,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,415, 351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376, 985,738,674,610,546,363,972,725,661,597,414,350,959,712,648,584,401,1023,946, 763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583,400, 1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710,582,399,1021, 944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398,1020,943,760, 632,994,683,555,372,734,606,423] [views:debug,2014-08-19T16:52:01.220,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/538. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:01.220,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",538,pending,0} [ns_server:debug,2014-08-19T16:52:01.303,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 536. Nacking mccouch update. [views:debug,2014-08-19T16:52:01.303,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/536. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:01.304,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",536,pending,0} [ns_server:debug,2014-08-19T16:52:01.304,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,968,721,657,593,410,346, 955,708,644,580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,554, 371,980,733,669,605,541,422,358,967,720,656,592,409,345,954,707,643,579,396, 1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604, 540,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654, 590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743, 679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,406,342,951,704, 640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976, 729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690, 626,562,379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715, 651,587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740, 676,612,548,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637, 573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598, 415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,414,350,959,712,648,584,401,1023, 946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,349,711,583, 400,1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710,582,399, 1021,944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398,1020, 943,760,632,994,683,555,372,734,606,423] [views:debug,2014-08-19T16:52:01.337,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/536. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:01.337,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",536,pending,0} [ns_server:debug,2014-08-19T16:52:01.488,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 534. Nacking mccouch update. [views:debug,2014-08-19T16:52:01.488,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/534. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:01.488,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",534,pending,0} [ns_server:debug,2014-08-19T16:52:01.489,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,968,721,657,593,410,346, 955,708,644,580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,554, 371,980,733,669,605,541,422,358,967,720,656,592,409,345,954,707,643,579,396, 1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604, 540,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654, 590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743, 679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,406,342,951,704, 640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976, 729,665,601,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690, 626,562,379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715, 651,587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740, 676,612,548,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701,637, 573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598, 534,415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623, 559,376,985,738,674,610,546,363,972,725,661,597,414,350,959,712,648,584,401, 1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,349, 711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710,582, 399,1021,944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398, 1020,943,760,632,994,683,555,372,734,606,423] [views:debug,2014-08-19T16:52:01.556,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/534. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:01.556,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",534,pending,0} [ns_server:debug,2014-08-19T16:52:01.781,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 537. Nacking mccouch update. [views:debug,2014-08-19T16:52:01.781,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/537. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:01.781,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",537,pending,0} [ns_server:debug,2014-08-19T16:52:01.782,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,968,721,657,593,410,346, 955,708,644,580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,554, 371,980,733,669,605,541,422,358,967,720,656,592,409,345,954,707,643,579,396, 1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604, 540,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654, 590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743, 679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,406,342,951,704, 640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976, 729,665,601,537,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754, 690,626,562,379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962, 715,651,587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987, 740,676,612,548,365,974,727,663,599,416,352,961,714,650,586,403,948,765,701, 637,573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662, 598,534,415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687, 623,559,376,985,738,674,610,546,363,972,725,661,597,414,350,959,712,648,584, 401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660, 349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710, 582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398, 1020,943,760,632,994,683,555,372,734,606,423] [views:debug,2014-08-19T16:52:01.848,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/537. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:01.848,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",537,pending,0} [ns_server:debug,2014-08-19T16:52:02.015,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 535. Nacking mccouch update. [views:debug,2014-08-19T16:52:02.015,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/535. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:02.015,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",535,pending,0} [ns_server:debug,2014-08-19T16:52:02.016,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,968,721,657,593,410,346, 955,708,644,580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,554, 371,980,733,669,605,541,422,358,967,720,656,592,409,345,954,707,643,579,396, 1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604, 540,421,357,966,719,655,591,408,344,953,706,642,578,395,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654, 590,407,343,952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743, 679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,406,342,951,704, 640,576,393,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976, 729,665,601,537,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754, 690,626,562,379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962, 715,651,587,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987, 740,676,612,548,365,974,727,663,599,535,416,352,961,714,650,586,403,948,765, 701,637,573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726, 662,598,534,415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751, 687,623,559,376,985,738,674,610,546,363,972,725,661,597,414,350,959,712,648, 584,401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971, 660,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,348, 710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,347,709,581, 398,1020,943,760,632,994,683,555,372,734,606,423] [views:debug,2014-08-19T16:52:02.082,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/535. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:02.082,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",535,pending,0} [rebalance:debug,2014-08-19T16:52:02.087,ns_1@10.242.238.90:<0.31028.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:02.087,ns_1@10.242.238.90:<0.31022.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:02.087,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31028.0> (ok) [ns_server:debug,2014-08-19T16:52:02.087,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31022.0> (ok) [rebalance:debug,2014-08-19T16:52:02.091,ns_1@10.242.238.90:<0.31184.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 535 [rebalance:debug,2014-08-19T16:52:02.091,ns_1@10.242.238.90:<0.31187.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 534 [rebalance:debug,2014-08-19T16:52:02.251,ns_1@10.242.238.90:<0.31002.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:02.251,ns_1@10.242.238.90:<0.30996.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:02.251,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31002.0> (ok) [ns_server:debug,2014-08-19T16:52:02.251,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30996.0> (ok) [rebalance:debug,2014-08-19T16:52:02.255,ns_1@10.242.238.90:<0.31190.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 537 [rebalance:debug,2014-08-19T16:52:02.255,ns_1@10.242.238.90:<0.31193.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 536 [rebalance:debug,2014-08-19T16:52:02.412,ns_1@10.242.238.90:<0.30976.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:02.413,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30976.0> (ok) [rebalance:debug,2014-08-19T16:52:02.413,ns_1@10.242.238.90:<0.30956.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:02.413,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30956.0> (ok) [rebalance:debug,2014-08-19T16:52:02.416,ns_1@10.242.238.90:<0.31196.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 538 [rebalance:debug,2014-08-19T16:52:02.417,ns_1@10.242.238.90:<0.31199.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 539 [rebalance:debug,2014-08-19T16:52:02.480,ns_1@10.242.238.90:<0.30936.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:02.480,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30936.0> (ok) [rebalance:debug,2014-08-19T16:52:02.480,ns_1@10.242.238.90:<0.30930.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:02.480,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30930.0> (ok) [rebalance:debug,2014-08-19T16:52:02.484,ns_1@10.242.238.90:<0.31202.0>:janitor_agent:handle_call:793]Going to wait for 
persistence of checkpoint 1 in vbucket 540 [rebalance:debug,2014-08-19T16:52:02.484,ns_1@10.242.238.90:<0.31205.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 541 [rebalance:debug,2014-08-19T16:52:02.547,ns_1@10.242.238.90:<0.30910.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:02.547,ns_1@10.242.238.90:<0.30890.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:02.547,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30910.0> (ok) [ns_server:debug,2014-08-19T16:52:02.547,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30890.0> (ok) [rebalance:debug,2014-08-19T16:52:02.551,ns_1@10.242.238.90:<0.31208.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 542 [rebalance:debug,2014-08-19T16:52:02.551,ns_1@10.242.238.90:<0.31209.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 543 [rebalance:debug,2014-08-19T16:52:02.614,ns_1@10.242.238.90:<0.30878.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:02.614,ns_1@10.242.238.90:<0.30858.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:02.614,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30878.0> (ok) [ns_server:debug,2014-08-19T16:52:02.614,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30858.0> (ok) [rebalance:debug,2014-08-19T16:52:02.618,ns_1@10.242.238.90:<0.31214.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 544 [rebalance:debug,2014-08-19T16:52:02.619,ns_1@10.242.238.90:<0.31217.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 545 [rebalance:debug,2014-08-19T16:52:02.681,ns_1@10.242.238.90:<0.30832.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:02.681,ns_1@10.242.238.90:<0.30852.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:02.681,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30832.0> (ok) [ns_server:debug,2014-08-19T16:52:02.681,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30852.0> (ok) [rebalance:debug,2014-08-19T16:52:02.685,ns_1@10.242.238.90:<0.31220.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 547 [rebalance:debug,2014-08-19T16:52:02.685,ns_1@10.242.238.90:<0.31223.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 546 [rebalance:debug,2014-08-19T16:52:02.748,ns_1@10.242.238.90:<0.30826.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:02.748,ns_1@10.242.238.90:<0.30820.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:02.748,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30826.0> (ok) [ns_server:debug,2014-08-19T16:52:02.748,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30820.0> (ok) [rebalance:debug,2014-08-19T16:52:02.752,ns_1@10.242.238.90:<0.31227.0>:janitor_agent:handle_call:793]Going to wait for 
persistence of checkpoint 1 in vbucket 548 [rebalance:debug,2014-08-19T16:52:02.752,ns_1@10.242.238.90:<0.31230.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 549 [rebalance:debug,2014-08-19T16:52:02.815,ns_1@10.242.238.90:<0.30800.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:02.815,ns_1@10.242.238.90:<0.30794.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:02.815,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30800.0> (ok) [ns_server:debug,2014-08-19T16:52:02.815,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30794.0> (ok) [ns_server:debug,2014-08-19T16:52:02.816,ns_1@10.242.238.90:<0.31234.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 551) [ns_server:debug,2014-08-19T16:52:02.816,ns_1@10.242.238.90:<0.31234.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:02.817,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31233.0> (ok) [rebalance:debug,2014-08-19T16:52:02.817,ns_1@10.242.238.90:<0.30788.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:02.818,ns_1@10.242.238.90:<0.30788.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:02.818,ns_1@10.242.238.90:<0.31235.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:02.818,ns_1@10.242.238.90:<0.31235.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:02.818,ns_1@10.242.238.90:<0.30788.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:52:02.818,ns_1@10.242.238.90:<0.31236.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 550 [ns_server:info,2014-08-19T16:52:02.847,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 551 state to active [rebalance:debug,2014-08-19T16:52:02.857,ns_1@10.242.238.90:<0.30785.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:02.857,ns_1@10.242.238.90:<0.31074.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:02.857,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.30785.0> (ok) [ns_server:debug,2014-08-19T16:52:02.857,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31074.0> (ok) [ns_server:debug,2014-08-19T16:52:02.858,ns_1@10.242.238.90:<0.31240.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 553) [ns_server:debug,2014-08-19T16:52:02.858,ns_1@10.242.238.90:<0.31240.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:02.858,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31239.0> (ok) [rebalance:debug,2014-08-19T16:52:02.859,ns_1@10.242.238.90:<0.30760.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:02.859,ns_1@10.242.238.90:<0.30760.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:02.859,ns_1@10.242.238.90:<0.31241.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:02.859,ns_1@10.242.238.90:<0.31241.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:02.859,ns_1@10.242.238.90:<0.30760.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:02.879,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:02.882,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:02.883,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3280 us [ns_server:debug,2014-08-19T16:52:02.883,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:02.883,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{551, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:02.884,ns_1@10.242.238.90:<0.31243.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 552 [ns_server:info,2014-08-19T16:52:02.896,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 553 state to active [rebalance:debug,2014-08-19T16:52:02.899,ns_1@10.242.238.90:<0.31077.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:02.899,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31077.0> (ok) [ns_server:debug,2014-08-19T16:52:02.900,ns_1@10.242.238.90:<0.31247.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 554) [ns_server:debug,2014-08-19T16:52:02.900,ns_1@10.242.238.90:<0.31247.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:02.900,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31246.0> (ok) [rebalance:debug,2014-08-19T16:52:02.900,ns_1@10.242.238.90:<0.30732.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:02.900,ns_1@10.242.238.90:<0.30732.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:02.900,ns_1@10.242.238.90:<0.31248.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:02.901,ns_1@10.242.238.90:<0.31248.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:02.901,ns_1@10.242.238.90:<0.30732.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:02.927,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:02.930,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:02.930,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3426 us [ns_server:debug,2014-08-19T16:52:02.930,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:02.931,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{553, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:02.932,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/553. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:02.932,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",553,active,1} [ns_server:info,2014-08-19T16:52:02.935,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 554 state to active [ns_server:debug,2014-08-19T16:52:02.964,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:02.968,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:02.968,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3983 us [ns_server:debug,2014-08-19T16:52:02.968,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:02.969,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{554, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:02.976,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/551. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:02.976,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",551,active,1} [views:debug,2014-08-19T16:52:03.051,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/554. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:03.051,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",554,active,1} [rebalance:debug,2014-08-19T16:52:03.052,ns_1@10.242.238.90:<0.31184.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:03.052,ns_1@10.242.238.90:<0.31190.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31184.0> (ok) [ns_server:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31190.0> (ok) [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31187.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31193.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31199.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31205.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31209.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31193.0> (ok) [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31202.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31196.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31208.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31220.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31214.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31217.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31187.0> (ok) [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31230.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31223.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31199.0> (ok) [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31236.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31227.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31205.0> (ok) [rebalance:debug,2014-08-19T16:52:03.053,ns_1@10.242.238.90:<0.31243.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:52:03.054,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31209.0> (ok) [ns_server:debug,2014-08-19T16:52:03.054,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31202.0> (ok) [ns_server:debug,2014-08-19T16:52:03.054,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31196.0> (ok) [ns_server:debug,2014-08-19T16:52:03.054,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31208.0> (ok) [ns_server:debug,2014-08-19T16:52:03.054,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31220.0> (ok) [ns_server:debug,2014-08-19T16:52:03.054,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31214.0> (ok) [ns_server:debug,2014-08-19T16:52:03.054,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31217.0> (ok) [ns_server:debug,2014-08-19T16:52:03.054,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31230.0> (ok) [ns_server:debug,2014-08-19T16:52:03.054,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31223.0> (ok) [ns_server:debug,2014-08-19T16:52:03.054,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31236.0> (ok) [ns_server:debug,2014-08-19T16:52:03.054,ns_1@10.242.238.90:<0.31252.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 535) [ns_server:debug,2014-08-19T16:52:03.054,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31227.0> (ok) [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.31252.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31243.0> (ok) [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.31255.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 536) [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.31255.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31251.0> (ok) [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.31265.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 537) [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.31265.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31254.0> (ok) [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.31267.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 540) 
[ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.31267.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31253.0> (ok) [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.31272.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 542) [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.31272.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.31273.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 538) [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31260.0> (ok) [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.31273.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.30999.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.31019.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:<0.31275.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 544) [ns_server:debug,2014-08-19T16:52:03.055,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31262.0> (ok) [rebalance:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.30993.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31275.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31019.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.30999.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31276.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31261.0> (ok) [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31278.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 547) [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31277.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.30933.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31276.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31264.0> (ok) [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.30993.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait 
for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31279.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31278.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31277.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.30907.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31280.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 543) [rebalance:info,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31019.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.30933.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31279.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31281.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.30999.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.30973.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31263.0> (ok) [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31280.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:info,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.30993.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.30907.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31282.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31283.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 541) [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31259.0> (ok) [ns_server:debug,2014-08-19T16:52:03.056,ns_1@10.242.238.90:<0.31281.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31282.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31283.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.30973.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.30933.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.30875.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31284.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31285.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 539) [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31258.0> (ok) [rebalance:info,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.30907.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31284.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31285.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.30875.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31286.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 534) [rebalance:info,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.30973.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31287.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31257.0> (ok) [rebalance:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.30829.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31286.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31287.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31288.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 545) [rebalance:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.30887.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31288.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31256.0> (ok) [rebalance:info,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.30875.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.30829.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31290.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31289.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 550) [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31266.0> (ok) [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.30887.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31291.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31289.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.057,ns_1@10.242.238.90:<0.31290.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.30913.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.30829.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31291.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31292.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 548) [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31270.0> (ok) [rebalance:info,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.30887.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.30913.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31293.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31294.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 549) [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31292.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31293.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31025.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.30953.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31294.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [rebalance:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.30855.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31271.0> (ok) [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31295.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 546) [rebalance:info,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.30913.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31025.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31268.0> (ok) [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31295.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31297.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 552) [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.30953.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31298.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31296.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.30797.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31297.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31269.0> (ok) [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.30855.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.31298.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:03.058,ns_1@10.242.238.90:<0.31299.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.30797.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.31296.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.31300.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.30823.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.30953.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.31299.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.31025.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.30803.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31274.0> (ok) [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.31300.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.30855.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.30823.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.31301.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.30803.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.30797.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.31302.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.31301.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.31302.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.30849.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.30823.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.30849.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.31303.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.30768.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.30803.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:03.059,ns_1@10.242.238.90:<0.31303.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:03.060,ns_1@10.242.238.90:<0.30768.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:03.060,ns_1@10.242.238.90:<0.31304.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:52:03.060,ns_1@10.242.238.90:<0.30849.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:03.060,ns_1@10.242.238.90:<0.31304.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:03.060,ns_1@10.242.238.90:<0.30768.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:info,2014-08-19T16:52:03.120,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 535 state to active [ns_server:info,2014-08-19T16:52:03.129,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 536 state to active [ns_server:info,2014-08-19T16:52:03.138,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 540 state to active [ns_server:info,2014-08-19T16:52:03.157,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 550 state to active [ns_server:info,2014-08-19T16:52:03.178,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 534 state to active [views:debug,2014-08-19T16:52:03.185,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/535. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:03.185,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",535,active,1} [ns_server:info,2014-08-19T16:52:03.192,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 546 state to active [ns_server:debug,2014-08-19T16:52:03.218,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.221,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.221,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3594 us [ns_server:debug,2014-08-19T16:52:03.221,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.222,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{535, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:03.223,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 552 state to active [ns_server:info,2014-08-19T16:52:03.225,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 548 state to active [ns_server:info,2014-08-19T16:52:03.238,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 539 state to active [ns_server:info,2014-08-19T16:52:03.239,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 545 state to active [ns_server:info,2014-08-19T16:52:03.240,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 549 state to active [ns_server:info,2014-08-19T16:52:03.241,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 541 state to active [ns_server:info,2014-08-19T16:52:03.250,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 547 state to active 
[ns_server:debug,2014-08-19T16:52:03.268,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:52:03.269,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/550. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:03.269,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",550,active,1} [ns_server:info,2014-08-19T16:52:03.270,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 544 state to active [ns_server:info,2014-08-19T16:52:03.271,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 543 state to active [ns_server:debug,2014-08-19T16:52:03.276,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.276,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7127 us [ns_server:debug,2014-08-19T16:52:03.276,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.277,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{536, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:03.285,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 538 state to active [ns_server:info,2014-08-19T16:52:03.290,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 537 state to active [ns_server:info,2014-08-19T16:52:03.298,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 542 state to active [ns_server:debug,2014-08-19T16:52:03.313,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.318,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.318,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4754 us [ns_server:debug,2014-08-19T16:52:03.318,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.319,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{540, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, 
{servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:03.352,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/540. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:03.352,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",540,active,1} [ns_server:debug,2014-08-19T16:52:03.358,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.361,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.362,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3589 us [ns_server:debug,2014-08-19T16:52:03.362,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.363,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{550, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:03.423,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/536. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:03.424,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",536,active,1} [ns_server:debug,2014-08-19T16:52:03.428,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.429,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.430,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1321 us [ns_server:debug,2014-08-19T16:52:03.430,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.430,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{534, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:03.463,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.466,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.467,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3366 us [ns_server:debug,2014-08-19T16:52:03.467,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.467,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{808, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:03.494,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/534. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:03.494,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",534,active,1} [ns_server:debug,2014-08-19T16:52:03.506,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.508,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.508,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1613 us [ns_server:debug,2014-08-19T16:52:03.508,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.509,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{546, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:03.545,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.553,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.554,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7918 us [ns_server:debug,2014-08-19T16:52:03.554,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.555,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{552, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:03.561,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/552. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:03.561,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",552,active,1} [ns_server:debug,2014-08-19T16:52:03.593,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.597,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.597,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3695 us [ns_server:debug,2014-08-19T16:52:03.598,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.598,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{548, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:03.628,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/548. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:03.628,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",548,active,1} [ns_server:debug,2014-08-19T16:52:03.634,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.636,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.637,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2970 us [ns_server:debug,2014-08-19T16:52:03.637,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.637,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{539, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:03.675,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.679,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:03.680,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3803 us [ns_server:debug,2014-08-19T16:52:03.680,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.680,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{545, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:03.695,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/546. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:03.695,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",546,active,1} [ns_server:debug,2014-08-19T16:52:03.725,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.725,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.725,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 684 us [ns_server:debug,2014-08-19T16:52:03.726,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.726,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{541, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:03.760,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:52:03.762,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/544. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:03.762,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",544,active,1} [ns_server:debug,2014-08-19T16:52:03.763,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.764,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3806 us [ns_server:debug,2014-08-19T16:52:03.764,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.764,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{549, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:03.798,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.806,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.806,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8220 us [ns_server:debug,2014-08-19T16:52:03.806,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.807,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{547, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:03.828,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/542. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:03.829,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",542,active,1} [ns_server:debug,2014-08-19T16:52:03.839,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.843,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.843,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2081 us [ns_server:debug,2014-08-19T16:52:03.844,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.844,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{544, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:03.879,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.883,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.883,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1514 us [ns_server:debug,2014-08-19T16:52:03.884,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.884,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{543, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:03.896,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/538. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:03.896,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",538,active,1} [ns_server:debug,2014-08-19T16:52:03.918,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.922,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.922,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4300 us [ns_server:debug,2014-08-19T16:52:03.922,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.923,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{538, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:03.956,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:03.959,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.959,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3127 us [ns_server:debug,2014-08-19T16:52:03.959,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:03.960,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{537, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:03.962,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/549. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:03.963,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",549,active,1} [ns_server:debug,2014-08-19T16:52:03.997,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.001,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.001,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3619 us [ns_server:debug,2014-08-19T16:52:04.002,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.002,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{542, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:04.030,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/547. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:04.030,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",547,active,1} [ns_server:debug,2014-08-19T16:52:04.030,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.037,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7197 us [ns_server:debug,2014-08-19T16:52:04.037,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.038,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.038,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{796, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.071,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.074,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:04.074,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3632 us [ns_server:debug,2014-08-19T16:52:04.075,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.076,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{802, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:04.091,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/545. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:04.091,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",545,active,1} [ns_server:debug,2014-08-19T16:52:04.109,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.112,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.113,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3173 us [ns_server:debug,2014-08-19T16:52:04.113,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.114,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{800, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:04.132,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/543. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:04.133,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",543,active,1} [ns_server:debug,2014-08-19T16:52:04.150,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.154,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.154,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3926 us [ns_server:debug,2014-08-19T16:52:04.155,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.155,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{798, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:04.166,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/541. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:04.166,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",541,active,1} [views:debug,2014-08-19T16:52:04.200,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/539. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:04.200,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",539,active,1} [ns_server:debug,2014-08-19T16:52:04.204,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.206,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.206,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2354 us [ns_server:debug,2014-08-19T16:52:04.207,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{807, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.207,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:52:04.234,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/537. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:04.234,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",537,active,1} [ns_server:debug,2014-08-19T16:52:04.241,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.244,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.244,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2759 us [ns_server:debug,2014-08-19T16:52:04.244,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.245,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{809, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.281,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.291,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:04.291,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 10114 us [ns_server:debug,2014-08-19T16:52:04.291,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.292,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{788, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.320,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.323,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.323,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3186 us [ns_server:debug,2014-08-19T16:52:04.324,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.324,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{805, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.362,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.364,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.365,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3084 us [ns_server:debug,2014-08-19T16:52:04.365,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.366,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{803, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, 
{map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.404,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.406,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.406,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1817 us [ns_server:debug,2014-08-19T16:52:04.407,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.407,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{799, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.444,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.445,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 496 us [ns_server:debug,2014-08-19T16:52:04.445,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.446,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.447,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{795, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.482,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.483,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.483,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1433 us [ns_server:debug,2014-08-19T16:52:04.484,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.484,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{791, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.521,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.529,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.530,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8572 us [ns_server:debug,2014-08-19T16:52:04.530,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{792, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.533,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.560,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.563,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.563,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2911 us [ns_server:debug,2014-08-19T16:52:04.564,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.564,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{793, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.598,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.601,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.602,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3322 us 
[ns_server:debug,2014-08-19T16:52:04.602,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.603,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{794, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.639,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.641,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.641,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1654 us [ns_server:debug,2014-08-19T16:52:04.641,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.642,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{790, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.679,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.683,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3672 us [ns_server:debug,2014-08-19T16:52:04.684,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.685,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.685,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{806, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.719,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full 
synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.720,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.720,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1424 us [ns_server:debug,2014-08-19T16:52:04.721,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.721,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{789, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.757,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.766,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8565 us [ns_server:debug,2014-08-19T16:52:04.766,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.766,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.767,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{801, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.799,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.801,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.801,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1455 us [ns_server:debug,2014-08-19T16:52:04.801,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.802,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{797, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, 
{ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.839,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.842,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.842,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3531 us [ns_server:debug,2014-08-19T16:52:04.843,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.844,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{804, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.872,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.875,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.875,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3043 us [ns_server:debug,2014-08-19T16:52:04.876,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.876,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{295, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.912,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.914,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.914,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1397 us [ns_server:debug,2014-08-19T16:52:04.915,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:04.915,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{294, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.949,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.952,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.952,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3328 us [ns_server:debug,2014-08-19T16:52:04.952,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.953,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{288, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:04.987,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:04.996,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.996,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8995 us [ns_server:debug,2014-08-19T16:52:04.996,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:04.997,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{280, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.028,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.030,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully 
synchronized config in 1379 us [ns_server:debug,2014-08-19T16:52:05.030,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.030,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.031,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{298, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.066,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.069,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.069,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1610 us [ns_server:debug,2014-08-19T16:52:05.070,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.070,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{297, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.104,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.108,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.108,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3115 us [ns_server:debug,2014-08-19T16:52:05.108,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.109,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{296, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 
'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.144,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.146,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 964 us [ns_server:debug,2014-08-19T16:52:05.146,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.146,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.147,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{292, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.181,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.184,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.184,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2654 us [ns_server:debug,2014-08-19T16:52:05.184,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.185,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{284, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.220,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.229,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8562 us [ns_server:debug,2014-08-19T16:52:05.229,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.229,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.230,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{285, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.258,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.261,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.261,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3126 us [ns_server:debug,2014-08-19T16:52:05.261,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.262,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{286, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.296,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.299,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3576 us [ns_server:debug,2014-08-19T16:52:05.300,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.301,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.301,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{287, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.333,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.336,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2932 us [ns_server:debug,2014-08-19T16:52:05.336,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing 
replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.336,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.337,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{291, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.375,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.375,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.376,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{279, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.411,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.411,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8 us [ns_server:debug,2014-08-19T16:52:05.444,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.445,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.445,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.445,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 20 us [ns_server:debug,2014-08-19T16:52:05.446,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{278, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.479,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got 
full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.486,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.486,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7230 us [ns_server:debug,2014-08-19T16:52:05.486,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.487,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{290, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.519,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.522,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.523,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3764 us [ns_server:debug,2014-08-19T16:52:05.523,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.523,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{289, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.561,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.562,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1469 us [ns_server:debug,2014-08-19T16:52:05.563,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.563,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.563,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{282, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, 
{ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.601,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.606,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.607,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{281, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.606,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4809 us [ns_server:debug,2014-08-19T16:52:05.607,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.640,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.642,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.642,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1495 us [ns_server:debug,2014-08-19T16:52:05.642,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.643,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{283, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:05.695,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:05.696,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1166 us [ns_server:debug,2014-08-19T16:52:05.696,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:05.697,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
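[editor's note] The bucket-config records above all land within roughly half a second and differ only in the leading vbucket id of the partial map (292, 284, 285, 286, 287, 291, 279, 278, 290, 289, 282, 281, 283, ...) and in the reported sync time. When condensing a stretch of debug.log like this, a small offline script can pull out just those two values. The sketch below is an assumed helper, not part of ns_server; it presumes the raw log, where each record starts with a [ns_server:debug,<timestamp>,...] header and the pretty-printed Erlang terms may continue on following lines.

import re
import sys

# Assumed offline helper (not part of ns_server): condenses the repeated
# "Fully synchronized config" and "config change: buckets" records above.
# Expects the raw debug.log on stdin, e.g.
#   [ns_server:debug,2014-08-19T16:52:05.146,...]Fully synchronized config in 964 us
SYNC_RE = re.compile(r"Fully synchronized config in (\d+) us")
# The partial map entries above look like: {map,[{292, [...], [...]}]}
MAP_RE = re.compile(r"config change: buckets.*?\{map,\[\{(\d+),", re.S)

def summarize(text):
    durations = [int(m.group(1)) for m in SYNC_RE.finditer(text)]
    vbuckets = [int(m.group(1)) for m in MAP_RE.finditer(text)]
    return durations, vbuckets

if __name__ == "__main__":
    durations, vbuckets = summarize(sys.stdin.read())
    if durations:
        print("full config syncs: %d (min %d us, max %d us)"
              % (len(durations), min(durations), max(durations)))
    print("vbuckets named in partial map updates:", sorted(set(vbuckets)))

Fed this section (or the whole debug.log) on stdin, it reports how many full syncs ran and which vbucket ids the partial map updates touched, instead of the dozen near-identical dumps above.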
[ns_server:debug,2014-08-19T16:52:05.697,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{293, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:05.742,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 533 state to replica [ns_server:info,2014-08-19T16:52:05.746,ns_1@10.242.238.90:<0.31391.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 533 to state replica [ns_server:debug,2014-08-19T16:52:05.769,ns_1@10.242.238.90:<0.31391.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_533_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:05.771,ns_1@10.242.238.90:<0.31391.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[533]}, {checkpoints,[{533,0}]}, {name,<<"replication_building_533_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[533]}, {takeover,false}, {suffix,"building_533_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",533,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:05.772,ns_1@10.242.238.90:<0.31391.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31392.0> [rebalance:debug,2014-08-19T16:52:05.772,ns_1@10.242.238.90:<0.31391.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:05.772,ns_1@10.242.238.90:<0.31391.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1909.2>,#Ref<16550.0.2.139694>}]} [rebalance:info,2014-08-19T16:52:05.773,ns_1@10.242.238.90:<0.31391.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 533 [rebalance:debug,2014-08-19T16:52:05.773,ns_1@10.242.238.90:<0.31391.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1909.2>,#Ref<16550.0.2.139694>}] [ns_server:debug,2014-08-19T16:52:05.774,ns_1@10.242.238.90:<0.31391.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:52:05.774,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31393.0> (ok) [rebalance:debug,2014-08-19T16:52:05.776,ns_1@10.242.238.90:<0.31394.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 533 [ns_server:info,2014-08-19T16:52:05.845,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 532 state to replica [ns_server:info,2014-08-19T16:52:05.849,ns_1@10.242.238.90:<0.31411.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 532 to state replica [ns_server:debug,2014-08-19T16:52:05.867,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 533. Nacking mccouch update. 
[views:debug,2014-08-19T16:52:05.867,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/533. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:05.867,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",533,replica,0} [ns_server:debug,2014-08-19T16:52:05.868,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,708,644, 580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,554,371,980,733, 669,605,541,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758, 694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421,357,966, 719,655,591,408,344,953,706,642,578,395,1017,940,757,693,629,565,382,1004, 991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,407,343, 952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551, 368,977,730,666,602,538,419,355,964,717,653,589,406,342,951,704,640,576,393, 1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601, 537,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690,626,562, 379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 548,365,974,727,663,599,535,416,352,961,714,650,586,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534, 415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584,401, 1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,349, 711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710,582, 399,1021,944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398, 1020,943,760,632,994,683,555,372,734,606,423,968,657,346] [ns_server:debug,2014-08-19T16:52:05.871,ns_1@10.242.238.90:<0.31411.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_532_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:05.872,ns_1@10.242.238.90:<0.31411.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[532]}, {checkpoints,[{532,0}]}, {name,<<"replication_building_532_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[532]}, {takeover,false}, {suffix,"building_532_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",532,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:05.873,ns_1@10.242.238.90:<0.31411.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31412.0> [rebalance:debug,2014-08-19T16:52:05.873,ns_1@10.242.238.90:<0.31411.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:05.874,ns_1@10.242.238.90:<0.31411.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.1960.2>,#Ref<16550.0.2.139983>}]} [rebalance:info,2014-08-19T16:52:05.874,ns_1@10.242.238.90:<0.31411.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 
532 [rebalance:debug,2014-08-19T16:52:05.874,ns_1@10.242.238.90:<0.31411.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.1960.2>,#Ref<16550.0.2.139983>}] [ns_server:debug,2014-08-19T16:52:05.875,ns_1@10.242.238.90:<0.31411.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:52:05.875,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31413.0> (ok) [rebalance:debug,2014-08-19T16:52:05.877,ns_1@10.242.238.90:<0.31414.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 532 [views:debug,2014-08-19T16:52:05.902,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/533. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:05.902,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",533,replica,0} [ns_server:info,2014-08-19T16:52:05.941,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 531 state to replica [ns_server:info,2014-08-19T16:52:05.946,ns_1@10.242.238.90:<0.31417.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 531 to state replica [ns_server:debug,2014-08-19T16:52:05.966,ns_1@10.242.238.90:<0.31417.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_531_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:05.967,ns_1@10.242.238.90:<0.31417.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[531]}, {checkpoints,[{531,0}]}, {name,<<"replication_building_531_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[531]}, {takeover,false}, {suffix,"building_531_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",531,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:05.968,ns_1@10.242.238.90:<0.31417.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31432.0> [rebalance:debug,2014-08-19T16:52:05.968,ns_1@10.242.238.90:<0.31417.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:05.969,ns_1@10.242.238.90:<0.31417.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2011.2>,#Ref<16550.0.2.140249>}]} [rebalance:info,2014-08-19T16:52:05.969,ns_1@10.242.238.90:<0.31417.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 531 [rebalance:debug,2014-08-19T16:52:05.969,ns_1@10.242.238.90:<0.31417.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2011.2>,#Ref<16550.0.2.140249>}] [ns_server:debug,2014-08-19T16:52:05.970,ns_1@10.242.238.90:<0.31417.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:52:05.970,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31433.0> (ok) [rebalance:debug,2014-08-19T16:52:05.971,ns_1@10.242.238.90:<0.31434.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 531 [ns_server:debug,2014-08-19T16:52:05.994,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 532. Nacking mccouch update. 
[views:debug,2014-08-19T16:52:05.994,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/532. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:05.994,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",532,pending,0} [ns_server:debug,2014-08-19T16:52:05.995,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,708,644, 580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,554,371,980,733, 669,605,541,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758, 694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421,357,966, 719,655,591,408,344,953,706,642,578,395,1017,940,757,693,629,565,382,1004, 991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,407,343, 952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551, 368,977,730,666,602,538,419,355,964,717,653,589,406,342,951,704,640,576,393, 1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601, 537,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690,626,562, 379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 548,365,974,727,663,599,535,416,352,961,714,650,586,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534, 415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584,401, 1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532, 349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,348,710, 582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,347,709,581,398, 1020,943,760,632,994,683,555,372,734,606,423,968,657,346] [views:debug,2014-08-19T16:52:06.028,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/532. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:06.028,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",532,pending,0} [rebalance:debug,2014-08-19T16:52:06.030,ns_1@10.242.238.90:<0.31414.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:06.030,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31414.0> (ok) [rebalance:debug,2014-08-19T16:52:06.033,ns_1@10.242.238.90:<0.31437.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 532 [ns_server:info,2014-08-19T16:52:06.037,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 530 state to replica [ns_server:info,2014-08-19T16:52:06.043,ns_1@10.242.238.90:<0.31440.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 530 to state replica [ns_server:debug,2014-08-19T16:52:06.061,ns_1@10.242.238.90:<0.31440.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_530_'ns_1@10.242.238.90' [views:debug,2014-08-19T16:52:06.062,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/533. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:06.062,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",533,pending,0} [rebalance:info,2014-08-19T16:52:06.063,ns_1@10.242.238.90:<0.31440.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[530]}, {checkpoints,[{530,0}]}, {name,<<"replication_building_530_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[530]}, {takeover,false}, {suffix,"building_530_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",530,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:06.063,ns_1@10.242.238.90:<0.31440.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31441.0> [rebalance:debug,2014-08-19T16:52:06.063,ns_1@10.242.238.90:<0.31440.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:06.064,ns_1@10.242.238.90:<0.31440.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2068.2>,#Ref<16550.0.2.140605>}]} [rebalance:info,2014-08-19T16:52:06.064,ns_1@10.242.238.90:<0.31440.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 530 [rebalance:debug,2014-08-19T16:52:06.064,ns_1@10.242.238.90:<0.31440.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2068.2>,#Ref<16550.0.2.140605>}] [ns_server:debug,2014-08-19T16:52:06.065,ns_1@10.242.238.90:<0.31440.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:52:06.065,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31442.0> (ok) [rebalance:debug,2014-08-19T16:52:06.067,ns_1@10.242.238.90:<0.31443.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 530 [ns_server:debug,2014-08-19T16:52:06.128,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 531. Nacking mccouch update. 
[views:debug,2014-08-19T16:52:06.129,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/531. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:06.129,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",531,pending,0} [ns_server:debug,2014-08-19T16:52:06.130,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,708,644, 580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,554,371,980,733, 669,605,541,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758, 694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421,357,966, 719,655,591,408,344,953,706,642,578,395,1017,940,757,693,629,565,382,1004, 991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,407,343, 952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551, 368,977,730,666,602,538,419,355,964,717,653,589,406,342,951,704,640,576,393, 1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601, 537,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690,626,562, 379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 548,365,974,727,663,599,535,416,352,961,714,650,586,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534, 415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584,401, 1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532, 349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348, 710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,347,709,581, 398,1020,943,760,632,994,683,555,372,734,606,423,968,657,346] [ns_server:info,2014-08-19T16:52:06.136,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 529 state to replica [ns_server:info,2014-08-19T16:52:06.140,ns_1@10.242.238.90:<0.31460.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 529 to state replica [ns_server:debug,2014-08-19T16:52:06.160,ns_1@10.242.238.90:<0.31460.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_529_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:06.161,ns_1@10.242.238.90:<0.31460.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[529]}, {checkpoints,[{529,0}]}, {name,<<"replication_building_529_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[529]}, {takeover,false}, {suffix,"building_529_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",529,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:06.162,ns_1@10.242.238.90:<0.31460.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31461.0> [rebalance:debug,2014-08-19T16:52:06.162,ns_1@10.242.238.90:<0.31460.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:52:06.163,ns_1@10.242.238.90:<0.31460.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2119.2>,#Ref<16550.0.2.140892>}]} [rebalance:info,2014-08-19T16:52:06.163,ns_1@10.242.238.90:<0.31460.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 529 [rebalance:debug,2014-08-19T16:52:06.163,ns_1@10.242.238.90:<0.31460.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2119.2>,#Ref<16550.0.2.140892>}] [ns_server:debug,2014-08-19T16:52:06.164,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31462.0> (ok) [ns_server:debug,2014-08-19T16:52:06.164,ns_1@10.242.238.90:<0.31460.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:06.165,ns_1@10.242.238.90:<0.31463.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 529 [views:debug,2014-08-19T16:52:06.171,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/531. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:06.171,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",531,pending,0} [ns_server:info,2014-08-19T16:52:06.231,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 528 state to replica [ns_server:info,2014-08-19T16:52:06.236,ns_1@10.242.238.90:<0.31466.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 528 to state replica [ns_server:debug,2014-08-19T16:52:06.254,ns_1@10.242.238.90:<0.31466.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_528_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:06.255,ns_1@10.242.238.90:<0.31466.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[528]}, {checkpoints,[{528,0}]}, {name,<<"replication_building_528_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[528]}, {takeover,false}, {suffix,"building_528_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",528,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:06.256,ns_1@10.242.238.90:<0.31466.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31467.0> [rebalance:debug,2014-08-19T16:52:06.256,ns_1@10.242.238.90:<0.31466.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:06.256,ns_1@10.242.238.90:<0.31466.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2170.2>,#Ref<16550.0.2.141180>}]} [rebalance:info,2014-08-19T16:52:06.256,ns_1@10.242.238.90:<0.31466.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 528 [rebalance:debug,2014-08-19T16:52:06.257,ns_1@10.242.238.90:<0.31466.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2170.2>,#Ref<16550.0.2.141180>}] [ns_server:debug,2014-08-19T16:52:06.257,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31468.0> (ok) [ns_server:debug,2014-08-19T16:52:06.257,ns_1@10.242.238.90:<0.31466.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close 
message [rebalance:debug,2014-08-19T16:52:06.259,ns_1@10.242.238.90:<0.31469.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 528 [ns_server:info,2014-08-19T16:52:06.322,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 527 state to replica [ns_server:info,2014-08-19T16:52:06.326,ns_1@10.242.238.90:<0.31486.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 527 to state replica [ns_server:debug,2014-08-19T16:52:06.345,ns_1@10.242.238.90:<0.31486.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_527_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:06.346,ns_1@10.242.238.90:<0.31486.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[527]}, {checkpoints,[{527,0}]}, {name,<<"replication_building_527_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[527]}, {takeover,false}, {suffix,"building_527_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",527,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:06.347,ns_1@10.242.238.90:<0.31486.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31487.0> [rebalance:debug,2014-08-19T16:52:06.347,ns_1@10.242.238.90:<0.31486.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:06.347,ns_1@10.242.238.90:<0.31486.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2229.2>,#Ref<16550.0.2.141508>}]} [rebalance:info,2014-08-19T16:52:06.347,ns_1@10.242.238.90:<0.31486.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 527 [rebalance:debug,2014-08-19T16:52:06.348,ns_1@10.242.238.90:<0.31486.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2229.2>,#Ref<16550.0.2.141508>}] [ns_server:debug,2014-08-19T16:52:06.348,ns_1@10.242.238.90:<0.31486.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:52:06.348,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31488.0> (ok) [rebalance:debug,2014-08-19T16:52:06.350,ns_1@10.242.238.90:<0.31489.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 527 [ns_server:debug,2014-08-19T16:52:06.405,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 530. Nacking mccouch update. [views:debug,2014-08-19T16:52:06.405,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/530. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:06.405,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",530,pending,0} [ns_server:debug,2014-08-19T16:52:06.406,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,708,644, 580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,554,371,980,733, 669,605,541,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758, 694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421,357,966, 719,655,591,408,344,953,706,642,578,395,1017,940,757,693,629,565,382,1004, 991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,407,343, 952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551, 368,977,730,666,602,538,419,355,964,717,653,589,406,342,951,704,640,576,393, 1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601, 537,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690,626,562, 379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 548,365,974,727,663,599,535,416,352,961,714,650,586,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534, 415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584,401, 1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532, 349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348, 710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347,709, 581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,346] [ns_server:info,2014-08-19T16:52:06.415,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 526 state to replica [ns_server:info,2014-08-19T16:52:06.421,ns_1@10.242.238.90:<0.31492.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 526 to state replica [ns_server:debug,2014-08-19T16:52:06.448,ns_1@10.242.238.90:<0.31492.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_526_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:06.449,ns_1@10.242.238.90:<0.31492.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[526]}, {checkpoints,[{526,0}]}, {name,<<"replication_building_526_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[526]}, {takeover,false}, {suffix,"building_526_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",526,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:06.450,ns_1@10.242.238.90:<0.31492.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31494.0> [rebalance:debug,2014-08-19T16:52:06.450,ns_1@10.242.238.90:<0.31492.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:06.451,ns_1@10.242.238.90:<0.31492.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter 
{had_backfill,undefined,undefined,[{<16550.2282.2>,#Ref<16550.0.2.141827>}]} [rebalance:info,2014-08-19T16:52:06.451,ns_1@10.242.238.90:<0.31492.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 526 [rebalance:debug,2014-08-19T16:52:06.451,ns_1@10.242.238.90:<0.31492.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2282.2>,#Ref<16550.0.2.141827>}] [ns_server:debug,2014-08-19T16:52:06.452,ns_1@10.242.238.90:<0.31492.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:52:06.452,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31495.0> (ok) [rebalance:debug,2014-08-19T16:52:06.454,ns_1@10.242.238.90:<0.31496.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 526 [views:debug,2014-08-19T16:52:06.473,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/530. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:06.473,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",530,pending,0} [ns_server:info,2014-08-19T16:52:06.520,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 525 state to replica [ns_server:info,2014-08-19T16:52:06.524,ns_1@10.242.238.90:<0.31499.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 525 to state replica [ns_server:debug,2014-08-19T16:52:06.544,ns_1@10.242.238.90:<0.31499.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_525_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:06.546,ns_1@10.242.238.90:<0.31499.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[525]}, {checkpoints,[{525,0}]}, {name,<<"replication_building_525_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[525]}, {takeover,false}, {suffix,"building_525_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",525,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:06.546,ns_1@10.242.238.90:<0.31499.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31500.0> [rebalance:debug,2014-08-19T16:52:06.546,ns_1@10.242.238.90:<0.31499.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:06.547,ns_1@10.242.238.90:<0.31499.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2333.2>,#Ref<16550.0.2.142119>}]} [rebalance:info,2014-08-19T16:52:06.547,ns_1@10.242.238.90:<0.31499.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 525 [rebalance:debug,2014-08-19T16:52:06.548,ns_1@10.242.238.90:<0.31499.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2333.2>,#Ref<16550.0.2.142119>}] [ns_server:debug,2014-08-19T16:52:06.549,ns_1@10.242.238.90:<0.31499.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:52:06.549,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31501.0> (ok) [rebalance:debug,2014-08-19T16:52:06.551,ns_1@10.242.238.90:<0.31507.0>:janitor_agent:handle_call:793]Going to wait for persistence 
of checkpoint 1 in vbucket 525 [ns_server:info,2014-08-19T16:52:06.615,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 524 state to replica [ns_server:info,2014-08-19T16:52:06.619,ns_1@10.242.238.90:<0.31519.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 524 to state replica [ns_server:debug,2014-08-19T16:52:06.622,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 529. Nacking mccouch update. [views:debug,2014-08-19T16:52:06.623,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/529. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:06.623,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",529,pending,0} [ns_server:debug,2014-08-19T16:52:06.624,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,708,644, 580,397,1019,942,759,695,631,567,384,1006,993,746,682,618,554,371,980,733, 669,605,541,422,358,967,720,656,592,409,345,954,707,643,579,396,1018,941,758, 694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421,357,966, 719,655,591,408,344,953,706,642,578,395,1017,940,757,693,629,565,382,1004, 991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,407,343, 952,705,641,577,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551, 368,977,730,666,602,538,419,355,964,717,653,589,406,342,951,704,640,576,393, 1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601, 537,418,354,963,716,652,588,405,950,767,703,639,575,392,1014,754,690,626,562, 379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 548,365,974,727,663,599,535,416,352,961,714,650,586,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534, 415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584,401, 1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532, 349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348, 710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347,709, 581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346] [ns_server:debug,2014-08-19T16:52:06.637,ns_1@10.242.238.90:<0.31519.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_524_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:06.638,ns_1@10.242.238.90:<0.31519.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[524]}, {checkpoints,[{524,0}]}, {name,<<"replication_building_524_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[524]}, {takeover,false}, {suffix,"building_524_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",524,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} 
[rebalance:debug,2014-08-19T16:52:06.639,ns_1@10.242.238.90:<0.31519.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31520.0> [rebalance:debug,2014-08-19T16:52:06.639,ns_1@10.242.238.90:<0.31519.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:06.640,ns_1@10.242.238.90:<0.31519.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2385.2>,#Ref<16550.0.2.142392>}]} [rebalance:info,2014-08-19T16:52:06.640,ns_1@10.242.238.90:<0.31519.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 524 [rebalance:debug,2014-08-19T16:52:06.640,ns_1@10.242.238.90:<0.31519.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2385.2>,#Ref<16550.0.2.142392>}] [ns_server:debug,2014-08-19T16:52:06.641,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31521.0> (ok) [ns_server:debug,2014-08-19T16:52:06.641,ns_1@10.242.238.90:<0.31519.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:06.642,ns_1@10.242.238.90:<0.31522.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 524 [views:debug,2014-08-19T16:52:06.690,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/529. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:06.690,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",529,pending,0} [ns_server:info,2014-08-19T16:52:06.707,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 523 state to replica [ns_server:info,2014-08-19T16:52:06.711,ns_1@10.242.238.90:<0.31525.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 523 to state replica [ns_server:debug,2014-08-19T16:52:06.730,ns_1@10.242.238.90:<0.31525.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_523_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:06.731,ns_1@10.242.238.90:<0.31525.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[523]}, {checkpoints,[{523,0}]}, {name,<<"replication_building_523_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[523]}, {takeover,false}, {suffix,"building_523_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",523,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:06.732,ns_1@10.242.238.90:<0.31525.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31526.0> [rebalance:debug,2014-08-19T16:52:06.732,ns_1@10.242.238.90:<0.31525.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:06.732,ns_1@10.242.238.90:<0.31525.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2439.2>,#Ref<16550.0.2.142697>}]} [rebalance:info,2014-08-19T16:52:06.733,ns_1@10.242.238.90:<0.31525.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 523 [rebalance:debug,2014-08-19T16:52:06.733,ns_1@10.242.238.90:<0.31525.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2439.2>,#Ref<16550.0.2.142697>}] 
[ns_server:debug,2014-08-19T16:52:06.733,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31527.0> (ok) [ns_server:debug,2014-08-19T16:52:06.734,ns_1@10.242.238.90:<0.31525.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:06.735,ns_1@10.242.238.90:<0.31528.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 523 [ns_server:info,2014-08-19T16:52:06.800,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 522 state to replica [ns_server:info,2014-08-19T16:52:06.805,ns_1@10.242.238.90:<0.31545.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 522 to state replica [ns_server:debug,2014-08-19T16:52:06.823,ns_1@10.242.238.90:<0.31545.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_522_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:06.825,ns_1@10.242.238.90:<0.31545.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[522]}, {checkpoints,[{522,0}]}, {name,<<"replication_building_522_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[522]}, {takeover,false}, {suffix,"building_522_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",522,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:06.826,ns_1@10.242.238.90:<0.31545.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31546.0> [rebalance:debug,2014-08-19T16:52:06.826,ns_1@10.242.238.90:<0.31545.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:06.826,ns_1@10.242.238.90:<0.31545.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2490.2>,#Ref<16550.0.2.142963>}]} [rebalance:info,2014-08-19T16:52:06.826,ns_1@10.242.238.90:<0.31545.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 522 [rebalance:debug,2014-08-19T16:52:06.827,ns_1@10.242.238.90:<0.31545.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2490.2>,#Ref<16550.0.2.142963>}] [ns_server:debug,2014-08-19T16:52:06.827,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31547.0> (ok) [ns_server:debug,2014-08-19T16:52:06.827,ns_1@10.242.238.90:<0.31545.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:06.829,ns_1@10.242.238.90:<0.31548.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 522 [ns_server:debug,2014-08-19T16:52:06.840,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 527. Nacking mccouch update. [views:debug,2014-08-19T16:52:06.840,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/527. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:06.840,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",527,pending,0} [ns_server:debug,2014-08-19T16:52:06.841,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,644,942, 759,695,631,567,384,1006,993,746,682,618,554,371,980,733,669,605,541,422,358, 967,720,656,592,409,345,954,707,643,579,396,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527, 408,344,953,706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680, 616,552,369,978,731,667,603,539,420,356,965,718,654,590,407,343,952,705,641, 577,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730, 666,602,538,419,355,964,717,653,589,406,342,951,704,640,576,393,1015,938,755, 691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963, 716,652,588,405,950,767,703,639,575,392,1014,754,690,626,562,379,1001,988, 741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,404,949,766, 702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974, 727,663,599,535,416,352,961,714,650,586,403,948,765,701,637,573,390,1012,999, 752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,960, 713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738, 674,610,546,363,972,725,661,597,533,414,350,959,712,648,584,401,1023,946,763, 699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532,349,711,583,400, 1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348,710,582,399, 1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347,709,581,398, 1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346,708,580,397, 1019] [ns_server:info,2014-08-19T16:52:06.893,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 521 state to replica [views:debug,2014-08-19T16:52:06.899,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/527. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:06.899,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",527,pending,0} [ns_server:info,2014-08-19T16:52:06.901,ns_1@10.242.238.90:<0.31551.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 521 to state replica [ns_server:debug,2014-08-19T16:52:06.920,ns_1@10.242.238.90:<0.31551.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_521_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:06.921,ns_1@10.242.238.90:<0.31551.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[521]}, {checkpoints,[{521,0}]}, {name,<<"replication_building_521_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[521]}, {takeover,false}, {suffix,"building_521_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",521,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:06.922,ns_1@10.242.238.90:<0.31551.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31552.0> [rebalance:debug,2014-08-19T16:52:06.922,ns_1@10.242.238.90:<0.31551.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:06.922,ns_1@10.242.238.90:<0.31551.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2541.2>,#Ref<16550.0.2.143252>}]} [rebalance:info,2014-08-19T16:52:06.922,ns_1@10.242.238.90:<0.31551.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 521 [rebalance:debug,2014-08-19T16:52:06.923,ns_1@10.242.238.90:<0.31551.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2541.2>,#Ref<16550.0.2.143252>}] [ns_server:debug,2014-08-19T16:52:06.923,ns_1@10.242.238.90:<0.31551.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:52:06.923,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31553.0> (ok) [rebalance:debug,2014-08-19T16:52:06.925,ns_1@10.242.238.90:<0.31554.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 521 [ns_server:info,2014-08-19T16:52:06.991,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 520 state to replica [ns_server:info,2014-08-19T16:52:06.995,ns_1@10.242.238.90:<0.31571.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 520 to state replica [ns_server:debug,2014-08-19T16:52:07.007,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 528. Nacking mccouch update. [views:debug,2014-08-19T16:52:07.008,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/528. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:07.008,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",528,pending,0} [ns_server:debug,2014-08-19T16:52:07.009,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,644,942, 759,695,631,567,384,1006,993,746,682,618,554,371,980,733,669,605,541,422,358, 967,720,656,592,528,409,345,954,707,643,579,396,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527, 408,344,953,706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680, 616,552,369,978,731,667,603,539,420,356,965,718,654,590,407,343,952,705,641, 577,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730, 666,602,538,419,355,964,717,653,589,406,342,951,704,640,576,393,1015,938,755, 691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963, 716,652,588,405,950,767,703,639,575,392,1014,754,690,626,562,379,1001,988, 741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,404,949,766, 702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974, 727,663,599,535,416,352,961,714,650,586,403,948,765,701,637,573,390,1012,999, 752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,960, 713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738, 674,610,546,363,972,725,661,597,533,414,350,959,712,648,584,401,1023,946,763, 699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532,349,711,583,400, 1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348,710,582,399, 1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347,709,581,398, 1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346,708,580,397, 1019] [ns_server:debug,2014-08-19T16:52:07.015,ns_1@10.242.238.90:<0.31571.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_520_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:07.017,ns_1@10.242.238.90:<0.31571.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[520]}, {checkpoints,[{520,0}]}, {name,<<"replication_building_520_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[520]}, {takeover,false}, {suffix,"building_520_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",520,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:07.017,ns_1@10.242.238.90:<0.31571.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31572.0> [rebalance:debug,2014-08-19T16:52:07.017,ns_1@10.242.238.90:<0.31571.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:07.018,ns_1@10.242.238.90:<0.31571.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2592.2>,#Ref<16550.0.2.143543>}]} [rebalance:info,2014-08-19T16:52:07.018,ns_1@10.242.238.90:<0.31571.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 520 [rebalance:debug,2014-08-19T16:52:07.018,ns_1@10.242.238.90:<0.31571.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: 
true to [{<16550.2592.2>,#Ref<16550.0.2.143543>}] [ns_server:debug,2014-08-19T16:52:07.019,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31573.0> (ok) [ns_server:debug,2014-08-19T16:52:07.019,ns_1@10.242.238.90:<0.31571.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:07.021,ns_1@10.242.238.90:<0.31574.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 520 [views:debug,2014-08-19T16:52:07.079,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/528. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:07.079,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",528,pending,0} [ns_server:info,2014-08-19T16:52:07.085,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 519 state to replica [ns_server:info,2014-08-19T16:52:07.090,ns_1@10.242.238.90:<0.31577.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 519 to state replica [ns_server:debug,2014-08-19T16:52:07.108,ns_1@10.242.238.90:<0.31577.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_519_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:07.110,ns_1@10.242.238.90:<0.31577.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[519]}, {checkpoints,[{519,0}]}, {name,<<"replication_building_519_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[519]}, {takeover,false}, {suffix,"building_519_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",519,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:07.111,ns_1@10.242.238.90:<0.31577.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31578.0> [rebalance:debug,2014-08-19T16:52:07.111,ns_1@10.242.238.90:<0.31577.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:07.112,ns_1@10.242.238.90:<0.31577.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2648.2>,#Ref<16550.0.2.143826>}]} [rebalance:info,2014-08-19T16:52:07.112,ns_1@10.242.238.90:<0.31577.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 519 [rebalance:debug,2014-08-19T16:52:07.112,ns_1@10.242.238.90:<0.31577.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2648.2>,#Ref<16550.0.2.143826>}] [ns_server:debug,2014-08-19T16:52:07.113,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31579.0> (ok) [ns_server:debug,2014-08-19T16:52:07.113,ns_1@10.242.238.90:<0.31577.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:07.114,ns_1@10.242.238.90:<0.31580.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 519 [ns_server:info,2014-08-19T16:52:07.179,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 518 state to replica [ns_server:info,2014-08-19T16:52:07.183,ns_1@10.242.238.90:<0.31597.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 518 to state replica 
[ns_server:debug,2014-08-19T16:52:07.202,ns_1@10.242.238.90:<0.31597.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_518_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:07.203,ns_1@10.242.238.90:<0.31597.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[518]}, {checkpoints,[{518,0}]}, {name,<<"replication_building_518_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[518]}, {takeover,false}, {suffix,"building_518_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",518,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:07.204,ns_1@10.242.238.90:<0.31597.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31598.0> [rebalance:debug,2014-08-19T16:52:07.204,ns_1@10.242.238.90:<0.31597.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:07.205,ns_1@10.242.238.90:<0.31597.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2699.2>,#Ref<16550.0.2.144114>}]} [rebalance:info,2014-08-19T16:52:07.205,ns_1@10.242.238.90:<0.31597.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 518 [rebalance:debug,2014-08-19T16:52:07.205,ns_1@10.242.238.90:<0.31597.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2699.2>,#Ref<16550.0.2.144114>}] [ns_server:debug,2014-08-19T16:52:07.206,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31599.0> (ok) [ns_server:debug,2014-08-19T16:52:07.206,ns_1@10.242.238.90:<0.31597.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:07.207,ns_1@10.242.238.90:<0.31600.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 518 [ns_server:debug,2014-08-19T16:52:07.212,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 526. Nacking mccouch update. [views:debug,2014-08-19T16:52:07.212,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/526. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:07.213,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",526,pending,0} [ns_server:debug,2014-08-19T16:52:07.214,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,644,942, 759,695,631,567,384,1006,993,746,682,618,554,371,980,733,669,605,541,422,358, 967,720,656,592,528,409,345,954,707,643,579,396,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527, 408,344,953,706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680, 616,552,369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705, 641,577,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977, 730,666,602,538,419,355,964,717,653,589,406,342,951,704,640,576,393,1015,938, 755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354, 963,716,652,588,405,950,767,703,639,575,392,1014,754,690,626,562,379,1001, 988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,404,949, 766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548,365, 974,727,663,599,535,416,352,961,714,650,586,403,948,765,701,637,573,390,1012, 999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351, 960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985, 738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584,401,1023,946, 763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532,349,711,583, 400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348,710,582,399, 1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347,709,581,398, 1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346,708,580,397, 1019] [ns_server:info,2014-08-19T16:52:07.273,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 517 state to replica [ns_server:info,2014-08-19T16:52:07.278,ns_1@10.242.238.90:<0.31603.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 517 to state replica [views:debug,2014-08-19T16:52:07.294,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/526. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:07.294,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",526,pending,0} [ns_server:debug,2014-08-19T16:52:07.303,ns_1@10.242.238.90:<0.31603.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_517_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:07.304,ns_1@10.242.238.90:<0.31603.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[517]}, {checkpoints,[{517,0}]}, {name,<<"replication_building_517_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[517]}, {takeover,false}, {suffix,"building_517_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",517,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:07.305,ns_1@10.242.238.90:<0.31603.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31604.0> [rebalance:debug,2014-08-19T16:52:07.305,ns_1@10.242.238.90:<0.31603.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:07.305,ns_1@10.242.238.90:<0.31603.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2750.2>,#Ref<16550.0.2.144425>}]} [rebalance:info,2014-08-19T16:52:07.306,ns_1@10.242.238.90:<0.31603.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 517 [rebalance:debug,2014-08-19T16:52:07.306,ns_1@10.242.238.90:<0.31603.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2750.2>,#Ref<16550.0.2.144425>}] [ns_server:debug,2014-08-19T16:52:07.306,ns_1@10.242.238.90:<0.31603.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:52:07.307,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31605.0> (ok) [rebalance:debug,2014-08-19T16:52:07.309,ns_1@10.242.238.90:<0.31606.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 517 [ns_server:info,2014-08-19T16:52:07.374,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 516 state to replica [ns_server:info,2014-08-19T16:52:07.378,ns_1@10.242.238.90:<0.31629.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 516 to state replica [ns_server:debug,2014-08-19T16:52:07.402,ns_1@10.242.238.90:<0.31629.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_516_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:07.403,ns_1@10.242.238.90:<0.31629.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[516]}, {checkpoints,[{516,0}]}, {name,<<"replication_building_516_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[516]}, {takeover,false}, {suffix,"building_516_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",516,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:07.404,ns_1@10.242.238.90:<0.31629.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31630.0> [rebalance:debug,2014-08-19T16:52:07.404,ns_1@10.242.238.90:<0.31629.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:52:07.404,ns_1@10.242.238.90:<0.31629.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2801.2>,#Ref<16550.0.2.144714>}]} [rebalance:info,2014-08-19T16:52:07.405,ns_1@10.242.238.90:<0.31629.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 516 [rebalance:debug,2014-08-19T16:52:07.405,ns_1@10.242.238.90:<0.31629.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2801.2>,#Ref<16550.0.2.144714>}] [ns_server:debug,2014-08-19T16:52:07.405,ns_1@10.242.238.90:<0.31629.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:52:07.405,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31631.0> (ok) [rebalance:debug,2014-08-19T16:52:07.407,ns_1@10.242.238.90:<0.31632.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 516 [ns_server:debug,2014-08-19T16:52:07.430,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 524. Nacking mccouch update. [views:debug,2014-08-19T16:52:07.430,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/524. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:07.431,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",524,pending,0} [ns_server:debug,2014-08-19T16:52:07.431,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,644,942, 759,695,631,567,384,1006,993,746,682,618,554,371,980,733,669,605,541,422,358, 967,720,656,592,528,409,345,954,707,643,579,396,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527, 408,344,953,706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680, 616,552,369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705, 641,577,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977, 730,666,602,538,419,355,964,717,653,589,406,342,951,704,640,576,393,1015,938, 755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354, 963,716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,404, 949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548, 365,974,727,663,599,535,416,352,961,714,650,586,403,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415, 351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376, 985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584,401,1023, 946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532,349,711, 583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348,710,582, 399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347,709,581,398, 1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346,708,580,397, 1019] [ns_server:info,2014-08-19T16:52:07.471,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed 
vbucket 515 state to replica [ns_server:info,2014-08-19T16:52:07.476,ns_1@10.242.238.90:<0.31635.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 515 to state replica [views:debug,2014-08-19T16:52:07.482,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/524. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:07.482,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",524,pending,0} [ns_server:debug,2014-08-19T16:52:07.494,ns_1@10.242.238.90:<0.31635.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_515_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:07.496,ns_1@10.242.238.90:<0.31635.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[515]}, {checkpoints,[{515,0}]}, {name,<<"replication_building_515_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[515]}, {takeover,false}, {suffix,"building_515_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",515,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:07.497,ns_1@10.242.238.90:<0.31635.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31636.0> [rebalance:debug,2014-08-19T16:52:07.497,ns_1@10.242.238.90:<0.31635.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:07.497,ns_1@10.242.238.90:<0.31635.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2852.2>,#Ref<16550.0.2.144980>}]} [rebalance:info,2014-08-19T16:52:07.497,ns_1@10.242.238.90:<0.31635.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 515 [rebalance:debug,2014-08-19T16:52:07.498,ns_1@10.242.238.90:<0.31635.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2852.2>,#Ref<16550.0.2.144980>}] [ns_server:debug,2014-08-19T16:52:07.499,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31637.0> (ok) [ns_server:debug,2014-08-19T16:52:07.499,ns_1@10.242.238.90:<0.31635.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:07.500,ns_1@10.242.238.90:<0.31638.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 515 [ns_server:info,2014-08-19T16:52:07.564,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 514 state to replica [ns_server:info,2014-08-19T16:52:07.569,ns_1@10.242.238.90:<0.31655.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 514 to state replica [ns_server:debug,2014-08-19T16:52:07.589,ns_1@10.242.238.90:<0.31655.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_514_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:07.590,ns_1@10.242.238.90:<0.31655.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[514]}, {checkpoints,[{514,0}]}, {name,<<"replication_building_514_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[514]}, {takeover,false}, {suffix,"building_514_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",514,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, 
{username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:07.591,ns_1@10.242.238.90:<0.31655.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31656.0> [rebalance:debug,2014-08-19T16:52:07.591,ns_1@10.242.238.90:<0.31655.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:07.592,ns_1@10.242.238.90:<0.31655.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2903.2>,#Ref<16550.0.2.145895>}]} [rebalance:info,2014-08-19T16:52:07.592,ns_1@10.242.238.90:<0.31655.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 514 [rebalance:debug,2014-08-19T16:52:07.592,ns_1@10.242.238.90:<0.31655.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2903.2>,#Ref<16550.0.2.145895>}] [ns_server:debug,2014-08-19T16:52:07.593,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31657.0> (ok) [ns_server:debug,2014-08-19T16:52:07.593,ns_1@10.242.238.90:<0.31655.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:07.594,ns_1@10.242.238.90:<0.31658.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 514 [ns_server:debug,2014-08-19T16:52:07.614,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 522. Nacking mccouch update. [views:debug,2014-08-19T16:52:07.614,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/522. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:07.614,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",522,pending,0} [ns_server:debug,2014-08-19T16:52:07.615,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,644,942, 759,695,631,567,384,1006,993,746,682,618,554,371,980,733,669,605,541,422,358, 967,720,656,592,528,409,345,954,707,643,579,396,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527, 408,344,953,706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680, 616,552,369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705, 641,577,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977, 730,666,602,538,419,355,964,717,653,589,406,342,951,704,640,576,393,1015,938, 755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354, 963,716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,404, 949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548, 365,974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534, 415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584,401, 1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532, 
349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348, 710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347,709, 581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346,708,580, 397,1019] [ns_server:info,2014-08-19T16:52:07.629,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 513 state to replica [ns_server:info,2014-08-19T16:52:07.634,ns_1@10.242.238.90:<0.31661.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 513 to state replica [ns_server:debug,2014-08-19T16:52:07.653,ns_1@10.242.238.90:<0.31661.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_513_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:07.654,ns_1@10.242.238.90:<0.31661.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[513]}, {checkpoints,[{513,0}]}, {name,<<"replication_building_513_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[513]}, {takeover,false}, {suffix,"building_513_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",513,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:07.655,ns_1@10.242.238.90:<0.31661.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31662.0> [rebalance:debug,2014-08-19T16:52:07.655,ns_1@10.242.238.90:<0.31661.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:07.655,ns_1@10.242.238.90:<0.31661.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2937.2>,#Ref<16550.0.2.146495>}]} [rebalance:info,2014-08-19T16:52:07.656,ns_1@10.242.238.90:<0.31661.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 513 [rebalance:debug,2014-08-19T16:52:07.656,ns_1@10.242.238.90:<0.31661.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2937.2>,#Ref<16550.0.2.146495>}] [ns_server:debug,2014-08-19T16:52:07.656,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31663.0> (ok) [ns_server:debug,2014-08-19T16:52:07.657,ns_1@10.242.238.90:<0.31661.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:07.658,ns_1@10.242.238.90:<0.31664.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 513 [views:debug,2014-08-19T16:52:07.681,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/522. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:07.681,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",522,pending,0} [rebalance:debug,2014-08-19T16:52:07.682,ns_1@10.242.238.90:<0.31394.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:07.682,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31394.0> (ok) [rebalance:debug,2014-08-19T16:52:07.685,ns_1@10.242.238.90:<0.31667.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 533 [ns_server:info,2014-08-19T16:52:07.692,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 512 state to replica [ns_server:info,2014-08-19T16:52:07.696,ns_1@10.242.238.90:<0.31670.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 512 to state replica [ns_server:debug,2014-08-19T16:52:07.714,ns_1@10.242.238.90:<0.31670.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_512_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:07.716,ns_1@10.242.238.90:<0.31670.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[512]}, {checkpoints,[{512,0}]}, {name,<<"replication_building_512_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[512]}, {takeover,false}, {suffix,"building_512_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",512,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,true}]} [rebalance:debug,2014-08-19T16:52:07.716,ns_1@10.242.238.90:<0.31670.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.31671.0> [rebalance:debug,2014-08-19T16:52:07.717,ns_1@10.242.238.90:<0.31670.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:07.717,ns_1@10.242.238.90:<0.31670.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.2974.2>,#Ref<16550.0.2.146745>}]} [rebalance:info,2014-08-19T16:52:07.717,ns_1@10.242.238.90:<0.31670.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 512 [rebalance:debug,2014-08-19T16:52:07.717,ns_1@10.242.238.90:<0.31670.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.2974.2>,#Ref<16550.0.2.146745>}] [ns_server:debug,2014-08-19T16:52:07.718,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31672.0> (ok) [ns_server:debug,2014-08-19T16:52:07.718,ns_1@10.242.238.90:<0.31670.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:07.719,ns_1@10.242.238.90:<0.31673.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 512 [ns_server:debug,2014-08-19T16:52:07.882,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 525. Nacking mccouch update. [views:debug,2014-08-19T16:52:07.882,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/525. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:07.882,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",525,pending,0} [ns_server:debug,2014-08-19T16:52:07.883,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,644,695, 567,384,1006,993,746,682,618,554,371,980,733,669,605,541,422,358,967,720,656, 592,528,409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005,992, 745,681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408,344, 953,706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680,616,552, 369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641,577, 394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,393,1015,938,755, 691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963, 716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379,1001, 988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,404,949, 766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548,365, 974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415, 351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559,376, 985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584,401,1023, 946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532,349,711, 583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348,710,582, 399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347,709,581,398, 1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346,708,580,397, 1019,942,759,631] [views:debug,2014-08-19T16:52:07.958,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/525. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:07.959,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",525,pending,0} [ns_server:debug,2014-08-19T16:52:08.060,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 523. Nacking mccouch update. [views:debug,2014-08-19T16:52:08.060,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/523. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:08.060,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",523,pending,0} [ns_server:debug,2014-08-19T16:52:08.061,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,644,695, 567,384,1006,993,746,682,618,554,371,980,733,669,605,541,422,358,967,720,656, 592,528,409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005,992, 745,681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408,344, 953,706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680,616,552, 369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641,577, 394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,393,1015,938,755, 691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963, 716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379,1001, 988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,523,404, 949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548, 365,974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534, 415,351,960,713,649,585,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584,401, 1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532, 349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348, 710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347,709, 581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346,708,580, 397,1019,942,759,631] [views:debug,2014-08-19T16:52:08.094,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/523. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:08.094,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",523,pending,0} [ns_server:debug,2014-08-19T16:52:08.244,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 521. Nacking mccouch update. [views:debug,2014-08-19T16:52:08.244,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/521. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:08.244,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",521,pending,0} [ns_server:debug,2014-08-19T16:52:08.245,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,698,570,387,1009,749,621,983,672, 544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722,594, 411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,644,695, 567,384,1006,993,746,682,618,554,371,980,733,669,605,541,422,358,967,720,656, 592,528,409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005,992, 745,681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408,344, 953,706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680,616,552, 369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641,577, 394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,393,1015,938,755, 691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963, 716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379,1001, 988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,523,404, 949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548, 365,974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534, 415,351,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623, 559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584, 401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660, 532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531, 348,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347, 709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346,708, 580,397,1019,942,759,631] [views:debug,2014-08-19T16:52:08.294,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/521. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:08.295,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",521,pending,0} [ns_server:debug,2014-08-19T16:52:08.461,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 519. Nacking mccouch update. [views:debug,2014-08-19T16:52:08.461,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/519. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:08.461,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",519,pending,0} [ns_server:debug,2014-08-19T16:52:08.462,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722, 594,411,956,645,696,568,385,1007,747,619,981,670,542,359,721,593,410,955,644, 695,567,384,1006,993,746,682,618,554,371,980,733,669,605,541,422,358,967,720, 656,592,528,409,345,954,707,643,579,396,1018,941,758,694,630,566,383,1005, 992,745,681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408, 344,953,706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680,616, 552,369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641, 577,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730, 666,602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,393,1015,938, 755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354, 963,716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,523, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 548,365,974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701,637, 573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598, 534,415,351,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687, 623,559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648, 584,401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971, 660,532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659, 531,348,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530, 347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346, 708,580,397,1019,942,759,631] [views:debug,2014-08-19T16:52:08.529,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/519. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:08.529,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",519,pending,0} [ns_server:debug,2014-08-19T16:52:08.679,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 517. Nacking mccouch update. [views:debug,2014-08-19T16:52:08.679,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/517. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:08.679,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",517,pending,0} [ns_server:debug,2014-08-19T16:52:08.680,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722, 594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410,955, 644,695,567,384,1006,993,746,682,618,554,371,980,733,669,605,541,422,358,967, 720,656,592,528,409,345,954,707,643,579,396,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527, 408,344,953,706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680, 616,552,369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705, 641,577,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977, 730,666,602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,393,1015, 938,755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418, 354,963,716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,523, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 548,365,974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701,637, 573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598, 534,415,351,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687, 623,559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648, 584,401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971, 660,532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659, 531,348,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530, 347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346, 708,580,397,1019,942,759,631] [views:debug,2014-08-19T16:52:08.730,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/517. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:08.730,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",517,pending,0} [ns_server:debug,2014-08-19T16:52:08.816,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 515. Nacking mccouch update. [views:debug,2014-08-19T16:52:08.816,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/515. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:08.816,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",515,pending,0} [ns_server:debug,2014-08-19T16:52:08.817,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722, 594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410,955, 644,695,567,384,1006,746,618,980,733,669,605,541,422,358,967,720,656,592,528, 409,345,954,707,643,579,515,396,1018,941,758,694,630,566,383,1005,992,745, 681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408,344,953, 706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680,616,552,369, 978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641,577,394, 1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666,602, 538,419,355,964,717,653,589,525,406,342,951,704,640,576,393,1015,938,755,691, 627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963,716, 652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379,1001,988, 741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,523,404,949, 766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548,365, 974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415, 351,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584,401, 1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532, 349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348, 710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347,709, 581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346,708,580, 397,1019,942,759,631,993,682,554,371] [views:debug,2014-08-19T16:52:08.850,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/515. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:08.851,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",515,pending,0} [ns_server:debug,2014-08-19T16:52:08.958,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 513. Nacking mccouch update. [views:debug,2014-08-19T16:52:08.958,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/513. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:08.958,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",513,pending,0} [ns_server:debug,2014-08-19T16:52:08.959,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722, 594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410,955, 644,695,567,384,1006,746,618,980,733,669,605,541,422,358,967,720,656,592,528, 409,345,954,707,643,579,515,396,1018,941,758,694,630,566,383,1005,992,745, 681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408,344,953, 706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680,616,552,369, 978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641,577,513, 394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,393,1015,938,755, 691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963, 716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379,1001, 988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,523,404, 949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548, 365,974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534, 415,351,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623, 559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584, 401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660, 532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531, 348,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347, 709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346,708, 580,397,1019,942,759,631,993,682,554,371] [views:debug,2014-08-19T16:52:09.010,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/513. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:09.010,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",513,pending,0} [ns_server:debug,2014-08-19T16:52:09.117,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 520. Nacking mccouch update. [views:debug,2014-08-19T16:52:09.117,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/520. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:09.118,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",520,pending,0} [ns_server:debug,2014-08-19T16:52:09.119,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,697,569,386,1008,748,620,982,671,543,360,722, 594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410,955, 644,695,567,384,1006,746,618,980,733,669,605,541,422,358,967,720,656,592,528, 409,345,954,707,643,579,515,396,1018,941,758,694,630,566,383,1005,992,745, 681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408,344,953, 706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680,616,552,369, 978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641,577,513, 394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,393,1015,938,755, 691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963, 716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379,1001, 988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,523,404, 949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548, 365,974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534, 415,351,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623, 559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584, 520,401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971, 660,532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659, 531,348,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530, 347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346, 708,580,397,1019,942,759,631,993,682,554,371] [views:debug,2014-08-19T16:52:09.169,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/520. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:09.169,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",520,pending,0} [ns_server:debug,2014-08-19T16:52:09.261,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 518. Nacking mccouch update. [views:debug,2014-08-19T16:52:09.261,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/518. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:09.261,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",518,pending,0} [ns_server:debug,2014-08-19T16:52:09.262,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,695,567,384,1006,746,618,980,733,669,605,541,422,358,967,720,656,592, 528,409,345,954,707,643,579,515,396,1018,941,758,694,630,566,383,1005,992, 745,681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408,344, 953,706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680,616,552, 369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641,577, 513,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730, 666,602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,393,1015,938, 755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354, 963,716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,523, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 548,365,974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701,637, 573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598, 534,415,351,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687, 623,559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648, 584,520,401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426, 971,660,532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970, 659,531,348,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658, 530,347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529, 346,708,580,397,1019,942,759,631,993,682,554,371] [views:debug,2014-08-19T16:52:09.312,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/518. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:09.312,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",518,pending,0} [ns_server:debug,2014-08-19T16:52:09.438,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 516. Nacking mccouch update. [views:debug,2014-08-19T16:52:09.438,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/516. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:09.438,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",516,pending,0} [ns_server:debug,2014-08-19T16:52:09.439,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,733,669,605,541,422,358,967,720,656, 592,528,409,345,954,707,643,579,515,396,1018,941,758,694,630,566,383,1005, 992,745,681,617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408, 344,953,706,642,578,395,1017,940,757,693,629,565,382,1004,991,744,680,616, 552,369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641, 577,513,394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977, 730,666,602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,393,1015, 938,755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418, 354,963,716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,523, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 548,365,974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701,637, 573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598, 534,415,351,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687, 623,559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648, 584,520,401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426, 971,660,532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970, 659,531,348,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658, 530,347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529, 346,708,580,397,1019,942,759,631,993,682,554,371] [views:debug,2014-08-19T16:52:09.488,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/516. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:09.488,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",516,pending,0} [ns_server:debug,2014-08-19T16:52:09.655,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 514. Nacking mccouch update. [views:debug,2014-08-19T16:52:09.656,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/514. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:09.656,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",514,pending,0} [ns_server:debug,2014-08-19T16:52:09.657,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,967,720,656,592,528,409, 345,954,707,643,579,515,396,1018,941,758,694,630,566,383,1005,992,745,681, 617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408,344,953,706, 642,578,514,395,1017,940,757,693,629,565,382,1004,991,744,680,616,552,369, 978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641,577,513, 394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,393,1015,938,755, 691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963, 716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379,1001, 988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,523,404, 949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548, 365,974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534, 415,351,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623, 559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648,584, 520,401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971, 660,532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659, 531,348,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530, 347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346, 708,580,397,1019,942,759,631,993,682,554,371,733,605,422] [views:debug,2014-08-19T16:52:09.707,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/514. Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:09.707,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",514,pending,0} [ns_server:debug,2014-08-19T16:52:09.848,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 512. Nacking mccouch update. [views:debug,2014-08-19T16:52:09.848,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/512. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:09.848,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",512,pending,0} [ns_server:debug,2014-08-19T16:52:09.849,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,967,720,656,592,528,409, 345,954,707,643,579,515,396,1018,941,758,694,630,566,383,1005,992,745,681, 617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408,344,953,706, 642,578,514,395,1017,940,757,693,629,565,382,1004,991,744,680,616,552,369, 978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641,577,513, 394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,512,393,1015,938, 755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354, 963,716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,975,728,664,600,536,417,353,962,715,651,587,523, 404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676,612, 548,365,974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701,637, 573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598, 534,415,351,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687, 623,559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712,648, 584,520,401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426, 971,660,532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970, 659,531,348,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658, 530,347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529, 346,708,580,397,1019,942,759,631,993,682,554,371,733,605,422] [views:debug,2014-08-19T16:52:09.899,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/512. 
Updated state: pending (0) [ns_server:debug,2014-08-19T16:52:09.899,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",512,pending,0} [ns_server:debug,2014-08-19T16:52:09.899,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:52:09.901,ns_1@10.242.238.90:<0.31673.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:09.902,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31673.0> (ok) [rebalance:debug,2014-08-19T16:52:09.905,ns_1@10.242.238.90:<0.31862.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 512 [ns_server:debug,2014-08-19T16:52:09.907,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:09.908,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8630 us [ns_server:debug,2014-08-19T16:52:09.908,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:09.909,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{786, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:09.977,ns_1@10.242.238.90:<0.31658.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:09.977,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31658.0> (ok) [rebalance:debug,2014-08-19T16:52:09.980,ns_1@10.242.238.90:<0.31866.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 514 [ns_server:debug,2014-08-19T16:52:10.020,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:10.025,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.026,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5452 us [ns_server:debug,2014-08-19T16:52:10.027,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.027,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{787, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, 
{num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:10.033,ns_1@10.242.238.90:<0.31632.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:10.033,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31632.0> (ok) [rebalance:debug,2014-08-19T16:52:10.033,ns_1@10.242.238.90:<0.31664.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:10.033,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31664.0> (ok) [rebalance:debug,2014-08-19T16:52:10.037,ns_1@10.242.238.90:<0.31870.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 516 [rebalance:debug,2014-08-19T16:52:10.037,ns_1@10.242.238.90:<0.31873.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 513 [rebalance:debug,2014-08-19T16:52:10.133,ns_1@10.242.238.90:<0.31600.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:10.133,ns_1@10.242.238.90:<0.31638.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:10.133,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31600.0> (ok) [ns_server:debug,2014-08-19T16:52:10.133,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31638.0> (ok) [rebalance:debug,2014-08-19T16:52:10.139,ns_1@10.242.238.90:<0.31876.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 515 [rebalance:debug,2014-08-19T16:52:10.139,ns_1@10.242.238.90:<0.31879.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 518 [ns_server:debug,2014-08-19T16:52:10.191,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:10.194,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.194,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3013 us [ns_server:debug,2014-08-19T16:52:10.194,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.195,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{768, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:10.248,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:10.251,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing 
replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.252,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.252,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1242 us [ns_server:debug,2014-08-19T16:52:10.253,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{772, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:10.257,ns_1@10.242.238.90:<0.31574.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:10.257,ns_1@10.242.238.90:<0.31606.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:10.257,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31574.0> (ok) [ns_server:debug,2014-08-19T16:52:10.257,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31606.0> (ok) [ns_server:debug,2014-08-19T16:52:10.292,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:52:10.294,ns_1@10.242.238.90:<0.31885.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 517 [rebalance:debug,2014-08-19T16:52:10.295,ns_1@10.242.238.90:<0.31884.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 520 [ns_server:debug,2014-08-19T16:52:10.295,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.296,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1840 us [ns_server:debug,2014-08-19T16:52:10.296,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.296,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{775, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:10.338,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:10.342,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:10.343,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4021 us [ns_server:debug,2014-08-19T16:52:10.343,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.343,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{782, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:10.355,ns_1@10.242.238.90:<0.31580.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:10.355,ns_1@10.242.238.90:<0.31548.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:10.355,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31580.0> (ok) [ns_server:debug,2014-08-19T16:52:10.355,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31548.0> (ok) [ns_server:debug,2014-08-19T16:52:10.381,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:52:10.381,ns_1@10.242.238.90:<0.31892.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 522 [rebalance:debug,2014-08-19T16:52:10.383,ns_1@10.242.238.90:<0.31895.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 519 [ns_server:debug,2014-08-19T16:52:10.388,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.388,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6966 us [ns_server:debug,2014-08-19T16:52:10.389,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.390,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{784, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:10.426,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:10.432,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:10.433,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6620 us [ns_server:debug,2014-08-19T16:52:10.433,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.434,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{777, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:10.455,ns_1@10.242.238.90:<0.31554.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:10.455,ns_1@10.242.238.90:<0.31522.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:10.456,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31522.0> (ok) [ns_server:debug,2014-08-19T16:52:10.456,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31554.0> (ok) [ns_server:debug,2014-08-19T16:52:10.468,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:52:10.470,ns_1@10.242.238.90:<0.31900.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 521 [rebalance:debug,2014-08-19T16:52:10.470,ns_1@10.242.238.90:<0.31903.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 524 [ns_server:debug,2014-08-19T16:52:10.470,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.471,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2923 us [ns_server:debug,2014-08-19T16:52:10.471,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.471,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{776, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:10.509,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:10.515,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:10.515,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.515,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6510 us [ns_server:debug,2014-08-19T16:52:10.516,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{779, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:10.539,ns_1@10.242.238.90:<0.31496.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:10.539,ns_1@10.242.238.90:<0.31528.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:10.539,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31496.0> (ok) [ns_server:debug,2014-08-19T16:52:10.539,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31528.0> (ok) [ns_server:debug,2014-08-19T16:52:10.552,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:52:10.553,ns_1@10.242.238.90:<0.31908.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 523 [rebalance:debug,2014-08-19T16:52:10.553,ns_1@10.242.238.90:<0.31909.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 526 [ns_server:debug,2014-08-19T16:52:10.555,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3656 us [ns_server:debug,2014-08-19T16:52:10.556,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.557,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.558,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{781, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:10.598,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:10.599,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:10.599,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1671 us [ns_server:debug,2014-08-19T16:52:10.600,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.600,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{778, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:10.639,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:10.646,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.647,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8209 us [ns_server:debug,2014-08-19T16:52:10.647,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.648,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{785, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:10.673,ns_1@10.242.238.90:<0.31469.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:10.673,ns_1@10.242.238.90:<0.31507.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:10.673,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31469.0> (ok) [ns_server:debug,2014-08-19T16:52:10.673,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31507.0> (ok) [ns_server:debug,2014-08-19T16:52:10.678,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:52:10.681,ns_1@10.242.238.90:<0.31917.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 525 [rebalance:debug,2014-08-19T16:52:10.681,ns_1@10.242.238.90:<0.31918.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 528 [ns_server:debug,2014-08-19T16:52:10.682,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:10.683,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.683,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2755 us [ns_server:debug,2014-08-19T16:52:10.684,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{780, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:10.744,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:10.748,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.748,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2981 us [ns_server:debug,2014-08-19T16:52:10.748,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.749,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{771, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:10.783,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:10.787,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.788,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2051 us [ns_server:debug,2014-08-19T16:52:10.788,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.789,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{773, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, 
{map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:10.823,ns_1@10.242.238.90:<0.31443.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:10.823,ns_1@10.242.238.90:<0.31489.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:10.823,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31443.0> (ok) [ns_server:debug,2014-08-19T16:52:10.823,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31489.0> (ok) [ns_server:debug,2014-08-19T16:52:10.829,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:52:10.829,ns_1@10.242.238.90:<0.31926.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 530 [rebalance:debug,2014-08-19T16:52:10.829,ns_1@10.242.238.90:<0.31929.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 527 [ns_server:debug,2014-08-19T16:52:10.831,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2200 us [ns_server:debug,2014-08-19T16:52:10.832,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.832,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.833,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{770, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:10.868,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:10.871,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.871,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3111 us [ns_server:debug,2014-08-19T16:52:10.871,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.872,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{783, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] 
[ns_server:debug,2014-08-19T16:52:10.915,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:10.921,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.921,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5919 us [ns_server:debug,2014-08-19T16:52:10.922,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.923,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{769, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:10.949,ns_1@10.242.238.90:<0.31437.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:10.949,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31437.0> (ok) [rebalance:debug,2014-08-19T16:52:10.949,ns_1@10.242.238.90:<0.31463.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:10.949,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31463.0> (ok) [ns_server:debug,2014-08-19T16:52:10.950,ns_1@10.242.238.90:<0.31936.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 532) [ns_server:debug,2014-08-19T16:52:10.950,ns_1@10.242.238.90:<0.31936.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:10.950,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31935.0> (ok) [rebalance:debug,2014-08-19T16:52:10.951,ns_1@10.242.238.90:<0.31411.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:10.951,ns_1@10.242.238.90:<0.31411.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:10.951,ns_1@10.242.238.90:<0.31937.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:10.951,ns_1@10.242.238.90:<0.31937.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:10.951,ns_1@10.242.238.90:<0.31411.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:10.953,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:52:10.953,ns_1@10.242.238.90:<0.31938.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 529 [ns_server:debug,2014-08-19T16:52:10.956,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:10.957,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3678 us [ns_server:debug,2014-08-19T16:52:10.958,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{774, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:10.959,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:52:10.987,ns_1@10.242.238.90:<0.18786.0>:ns_memcached:do_handle_call:527]Changed vbucket 532 state to active [rebalance:debug,2014-08-19T16:52:10.999,ns_1@10.242.238.90:<0.31434.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:10.999,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31434.0> (ok) [rebalance:debug,2014-08-19T16:52:11.019,ns_1@10.242.238.90:<0.31942.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 531 [rebalance:debug,2014-08-19T16:52:11.052,ns_1@10.242.238.90:<0.31667.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:11.052,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31667.0> (ok) [ns_server:debug,2014-08-19T16:52:11.053,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.052,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.053,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 11 us [ns_server:debug,2014-08-19T16:52:11.053,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.054,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{532, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, 
{servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:11.054,ns_1@10.242.238.90:<0.31947.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 533) [ns_server:debug,2014-08-19T16:52:11.054,ns_1@10.242.238.90:<0.31947.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.054,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31946.0> (ok) [rebalance:debug,2014-08-19T16:52:11.054,ns_1@10.242.238.90:<0.31391.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:11.055,ns_1@10.242.238.90:<0.31391.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.055,ns_1@10.242.238.90:<0.31948.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.055,ns_1@10.242.238.90:<0.31948.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:11.055,ns_1@10.242.238.90:<0.31391.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [views:debug,2014-08-19T16:52:11.099,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/532. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.099,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",532,active,1} [rebalance:debug,2014-08-19T16:52:11.100,ns_1@10.242.238.90:<0.31862.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.100,ns_1@10.242.238.90:<0.31866.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:11.100,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31862.0> (ok) [rebalance:debug,2014-08-19T16:52:11.100,ns_1@10.242.238.90:<0.31870.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31876.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31879.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:11.100,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31866.0> (ok) [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31892.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31885.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31873.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31884.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31895.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31900.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31903.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31870.0> (ok) 
[rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31908.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31909.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31918.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31926.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31876.0> (ok) [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31917.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31929.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31938.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:<0.31942.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31879.0> (ok) [ns_server:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31885.0> (ok) [ns_server:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31873.0> (ok) [ns_server:debug,2014-08-19T16:52:11.101,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31884.0> (ok) [ns_server:debug,2014-08-19T16:52:11.102,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31892.0> (ok) [ns_server:debug,2014-08-19T16:52:11.102,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31895.0> (ok) [ns_server:debug,2014-08-19T16:52:11.102,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31900.0> (ok) [ns_server:debug,2014-08-19T16:52:11.102,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31903.0> (ok) [ns_server:debug,2014-08-19T16:52:11.102,ns_1@10.242.238.90:<0.31950.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 512) [ns_server:debug,2014-08-19T16:52:11.102,ns_1@10.242.238.90:<0.31950.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.102,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31908.0> (ok) [ns_server:debug,2014-08-19T16:52:11.102,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31918.0> (ok) [ns_server:debug,2014-08-19T16:52:11.102,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31909.0> (ok) [ns_server:debug,2014-08-19T16:52:11.102,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31926.0> (ok) [ns_server:debug,2014-08-19T16:52:11.102,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message 
from subprocess: <0.31917.0> (ok) [ns_server:debug,2014-08-19T16:52:11.103,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31929.0> (ok) [ns_server:debug,2014-08-19T16:52:11.103,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31938.0> (ok) [ns_server:debug,2014-08-19T16:52:11.103,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31942.0> (ok) [ns_server:debug,2014-08-19T16:52:11.103,ns_1@10.242.238.90:<0.31952.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 514) [ns_server:debug,2014-08-19T16:52:11.103,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31949.0> (ok) [ns_server:debug,2014-08-19T16:52:11.103,ns_1@10.242.238.90:<0.31952.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.103,ns_1@10.242.238.90:<0.31959.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 516) [ns_server:debug,2014-08-19T16:52:11.103,ns_1@10.242.238.90:<0.31959.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.103,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31951.0> (ok) [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31953.0> (ok) [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31968.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 515) [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31968.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31973.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 520) [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31954.0> (ok) [rebalance:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31670.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31973.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31974.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 518) [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31958.0> (ok) [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31670.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31975.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31975.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31974.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired 
[ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31955.0> (ok) [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31976.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 513) [rebalance:info,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31670.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31976.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31977.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 517) [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31957.0> (ok) [ns_server:debug,2014-08-19T16:52:11.104,ns_1@10.242.238.90:<0.31977.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31956.0> (ok) [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:<0.31978.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 530) [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:<0.31978.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:<0.31979.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 526) [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31967.0> (ok) [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:<0.31979.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:<0.31980.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 528) [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31966.0> (ok) [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:<0.31980.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:<0.31981.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 523) [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31965.0> (ok) [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:<0.31981.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:<0.31982.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 524) [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31964.0> (ok) [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:<0.31982.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.105,ns_1@10.242.238.90:<0.31983.0>:capi_set_view_manager:do_wait_index_updated:618]References to 
wait: [] ("default", 521) [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31963.0> (ok) [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:<0.31983.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:<0.31984.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 519) [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:<0.31984.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31962.0> (ok) [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31961.0> (ok) [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:<0.31985.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 522) [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:<0.31985.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:<0.31986.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 529) [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31960.0> (ok) [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:<0.31986.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:<0.31987.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 525) [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31971.0> (ok) [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:<0.31987.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:<0.31988.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 527) [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31970.0> (ok) [ns_server:debug,2014-08-19T16:52:11.106,ns_1@10.242.238.90:<0.31988.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.107,ns_1@10.242.238.90:<0.31989.0>:capi_set_view_manager:do_wait_index_updated:618]References to wait: [] ("default", 531) [ns_server:debug,2014-08-19T16:52:11.107,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31969.0> (ok) [ns_server:debug,2014-08-19T16:52:11.107,ns_1@10.242.238.90:<0.31989.0>:capi_set_view_manager:do_wait_index_updated:638]All refs fired [ns_server:debug,2014-08-19T16:52:11.107,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.31972.0> (ok) [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31571.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
[ns_server:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31571.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31655.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31990.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31597.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31492.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31603.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31655.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31440.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31990.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31629.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31991.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31577.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31551.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31661.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31519.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31486.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31597.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31525.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31545.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31460.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31499.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31571.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31466.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31492.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31417.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31635.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31991.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:11.108,ns_1@10.242.238.90:<0.31993.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31440.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31486.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31992.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31577.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31545.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31603.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31519.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31499.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31997.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31460.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31995.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31551.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31994.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31998.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31999.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception 
[ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31996.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31992.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31629.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31466.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.32003.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.32004.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.32002.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.32000.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31655.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31997.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31993.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31661.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31525.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.32006.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31995.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.32001.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.32005.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31998.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31597.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31994.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31417.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31486.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31999.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.32007.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31603.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31635.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31996.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.32003.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31492.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.32008.0>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31545.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.32004.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31440.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.32002.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.32000.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:11.109,ns_1@10.242.238.90:<0.31519.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.31577.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.31629.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.31466.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.31551.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.31499.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.32006.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.32001.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.32005.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.31525.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.31460.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.32007.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.31661.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.32008.0>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.31417.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:52:11.110,ns_1@10.242.238.90:<0.31635.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:11.165,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.168,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.169,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3490 us [ns_server:debug,2014-08-19T16:52:11.169,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.170,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{269, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:11.213,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.216,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.216,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3202 us [ns_server:debug,2014-08-19T16:52:11.216,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.217,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{272, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:11.238,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 533 state to active 
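The shutdown sequence above follows a fixed per-migrator pattern: "Dying with reason: shutdown" from the ebucketmigrator pid, an opaque confirmation message sent downstream, and finally "Got close ack!" from the same pid. A small consistency check, sketched under the same assumptions as the parser above (raw log text in a placeholder file), flags any migrator that died without logging a close ack:

```python
import re

# Pair each "Dying with reason: shutdown" with a later "Got close ack!" from
# the same ebucketmigrator pid; anything left over died without the downstream
# acknowledging the close. A grep-level check over raw log text.
EVENT = re.compile(
    r":(?P<pid><\d+\.\d+\.\d+>):ebucketmigrator_srv:\w+:\d+\]"
    r"(?P<msg>Dying with reason: shutdown|Got close ack!)"
)

def unacked_migrators(text):
    dying, acked = set(), set()
    for m in EVENT.finditer(text):
        (dying if m["msg"].startswith("Dying") else acked).add(m["pid"])
    return sorted(dying - acked)

if __name__ == "__main__":
    print(unacked_migrators(open("ns_server.debug.log").read()))   # placeholder name
```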
[ns_server:debug,2014-08-19T16:52:11.261,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.264,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.264,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3183 us [ns_server:debug,2014-08-19T16:52:11.265,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.265,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{260, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:11.300,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/533. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.300,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",533,active,1} [ns_server:debug,2014-08-19T16:52:11.313,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:info,2014-08-19T16:52:11.323,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 516 state to active [ns_server:debug,2014-08-19T16:52:11.323,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.323,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 9516 us [ns_server:debug,2014-08-19T16:52:11.324,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{274, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:11.325,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:52:11.331,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 526 state to active [ns_server:info,2014-08-19T16:52:11.341,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 518 state to active [ns_server:info,2014-08-19T16:52:11.350,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 519 state to active 
[views:debug,2014-08-19T16:52:11.351,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/516. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.351,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",516,active,1} [ns_server:info,2014-08-19T16:52:11.363,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 527 state to active [ns_server:debug,2014-08-19T16:52:11.366,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.367,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.368,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2111 us [ns_server:debug,2014-08-19T16:52:11.368,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{276, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:11.369,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:52:11.369,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 513 state to active [ns_server:info,2014-08-19T16:52:11.377,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 514 state to active [ns_server:info,2014-08-19T16:52:11.387,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 523 state to active [ns_server:info,2014-08-19T16:52:11.405,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 517 state to active [views:debug,2014-08-19T16:52:11.409,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/526. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.409,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",526,active,1} [ns_server:info,2014-08-19T16:52:11.411,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 512 state to active [ns_server:debug,2014-08-19T16:52:11.412,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.415,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.416,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3419 us [ns_server:debug,2014-08-19T16:52:11.416,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.416,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{273, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:11.431,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 529 state to active [ns_server:info,2014-08-19T16:52:11.439,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 530 state to active [ns_server:info,2014-08-19T16:52:11.444,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 528 state to active [ns_server:info,2014-08-19T16:52:11.453,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 521 state to active [ns_server:debug,2014-08-19T16:52:11.462,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.465,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.465,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3457 us [ns_server:debug,2014-08-19T16:52:11.466,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{256, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:11.466,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
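Each "config change: buckets" record in this stretch carries exactly one map delta of the form {VBucket, OldChain, NewChain}, reflecting the vbucket moves of the rebalance in progress. The rough extraction sketch below handles only the flat one-entry shape shown in these records, not general Erlang terms, and again reads from a placeholder file:

```python
import re

# Pull the single {VBucket, OldChain, NewChain} delta out of each
# "config change: buckets" record, e.g.
#   {269, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}
MAP_DELTA = re.compile(r"\{map,\[\{(\d+),\s*\[([^\]]*)\],\s*\[([^\]]*)\]\}\]\}")

def chain_moves(text):
    for vb, old, new in MAP_DELTA.findall(text):
        nodes = lambda s: [n.strip().strip("'") for n in s.split(",")]
        yield int(vb), nodes(old), nodes(new)

if __name__ == "__main__":
    for vb, old, new in chain_moves(open("ns_server.debug.log").read()):   # placeholder
        print(f"vbucket {vb}: {old} -> {new}")
```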
[ns_server:info,2014-08-19T16:52:11.467,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 515 state to active [views:debug,2014-08-19T16:52:11.467,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/518. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.468,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",518,active,1} [ns_server:info,2014-08-19T16:52:11.481,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 522 state to active [ns_server:info,2014-08-19T16:52:11.489,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 520 state to active [ns_server:info,2014-08-19T16:52:11.500,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 525 state to active [ns_server:info,2014-08-19T16:52:11.509,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 531 state to active [ns_server:debug,2014-08-19T16:52:11.519,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.521,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.521,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1842 us [ns_server:debug,2014-08-19T16:52:11.521,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.522,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{271, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:11.522,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 524 state to active [views:debug,2014-08-19T16:52:11.527,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/530. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.527,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",530,active,1} [ns_server:debug,2014-08-19T16:52:11.556,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.559,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.559,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3395 us [ns_server:debug,2014-08-19T16:52:11.560,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.560,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{265, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:11.561,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/528. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.561,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",528,active,1} [ns_server:debug,2014-08-19T16:52:11.598,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.604,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6431 us [ns_server:debug,2014-08-19T16:52:11.604,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.605,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.605,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{264, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:11.611,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/514. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.611,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",514,active,1} [ns_server:debug,2014-08-19T16:52:11.641,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.644,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.644,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3175 us [ns_server:debug,2014-08-19T16:52:11.645,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.645,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{261, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:11.661,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/512. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.662,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",512,active,1} [ns_server:debug,2014-08-19T16:52:11.688,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.691,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.691,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2976 us [ns_server:debug,2014-08-19T16:52:11.692,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.692,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{268, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:11.695,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/531. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.695,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",531,active,1} [views:debug,2014-08-19T16:52:11.729,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/529. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.729,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",529,active,1} [ns_server:debug,2014-08-19T16:52:11.729,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.732,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.732,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2959 us [ns_server:debug,2014-08-19T16:52:11.733,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.733,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{277, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:11.763,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/527. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.763,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",527,active,1} [ns_server:debug,2014-08-19T16:52:11.777,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.778,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.779,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1780 us [ns_server:debug,2014-08-19T16:52:11.779,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.779,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{259, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:11.796,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/525. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.796,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",525,active,1} [ns_server:debug,2014-08-19T16:52:11.815,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.818,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.818,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2921 us [ns_server:debug,2014-08-19T16:52:11.819,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.819,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{257, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:11.829,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/523. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.829,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",523,active,1} [ns_server:debug,2014-08-19T16:52:11.850,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.855,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.856,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4523 us [ns_server:debug,2014-08-19T16:52:11.856,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.856,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{533, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:11.889,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.892,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.892,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3299 us [ns_server:debug,2014-08-19T16:52:11.893,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.893,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{263, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:11.896,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/521. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.897,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",521,active,1} [views:debug,2014-08-19T16:52:11.932,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/519. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:11.933,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",519,active,1} [ns_server:debug,2014-08-19T16:52:11.936,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.938,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.938,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1427 us [ns_server:debug,2014-08-19T16:52:11.938,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.939,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{266, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:11.977,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:11.979,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.979,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1744 us [ns_server:debug,2014-08-19T16:52:11.980,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:11.980,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{270, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:12.008,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/517. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:12.008,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",517,active,1} [ns_server:debug,2014-08-19T16:52:12.021,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.022,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.022,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1090 us [ns_server:debug,2014-08-19T16:52:12.022,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.023,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{267, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.062,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.065,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.065,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3052 us [ns_server:debug,2014-08-19T16:52:12.065,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.066,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{262, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:12.083,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/515. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:12.083,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",515,active,1} [ns_server:debug,2014-08-19T16:52:12.106,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.114,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.115,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8247 us [ns_server:debug,2014-08-19T16:52:12.115,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.115,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{258, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:12.141,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/513. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:12.142,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",513,active,1} [ns_server:debug,2014-08-19T16:52:12.148,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.151,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.152,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3792 us [ns_server:debug,2014-08-19T16:52:12.152,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.152,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{275, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:12.175,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/524. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:12.175,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",524,active,1} [ns_server:debug,2014-08-19T16:52:12.186,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.186,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.186,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 10 us [ns_server:debug,2014-08-19T16:52:12.187,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.187,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{516, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:12.209,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/522. Updated state: active (1) [ns_server:debug,2014-08-19T16:52:12.209,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",522,active,1} [ns_server:debug,2014-08-19T16:52:12.223,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.226,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.226,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3430 us [ns_server:debug,2014-08-19T16:52:12.227,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.227,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{526, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:12.259,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/520. 
Updated state: active (1) [ns_server:debug,2014-08-19T16:52:12.259,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",520,active,1} [ns_server:debug,2014-08-19T16:52:12.268,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.271,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.272,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3223 us [ns_server:debug,2014-08-19T16:52:12.273,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{518, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.273,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.309,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.312,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.312,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3101 us [ns_server:debug,2014-08-19T16:52:12.312,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.313,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{519, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.351,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.355,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.355,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4517 us [ns_server:debug,2014-08-19T16:52:12.356,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:12.356,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{527, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.390,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.393,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.393,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3402 us [ns_server:debug,2014-08-19T16:52:12.394,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.394,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{513, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.441,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.441,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.441,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 10 us [ns_server:debug,2014-08-19T16:52:12.442,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.442,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{514, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.481,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:52:12.484,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.485,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3071 us [ns_server:debug,2014-08-19T16:52:12.485,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.485,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{523, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.522,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.526,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.526,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3363 us [ns_server:debug,2014-08-19T16:52:12.526,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.527,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{517, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.564,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.567,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.567,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3035 us [ns_server:debug,2014-08-19T16:52:12.568,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.568,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{512, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.610,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.613,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.613,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2585 us [ns_server:debug,2014-08-19T16:52:12.614,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.614,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{529, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.652,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.653,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.654,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1821 us [ns_server:debug,2014-08-19T16:52:12.654,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.654,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{530, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.693,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.696,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3222 us [ns_server:debug,2014-08-19T16:52:12.696,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.697,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:12.697,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{528, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.736,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.737,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.737,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1692 us [ns_server:debug,2014-08-19T16:52:12.738,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.738,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{521, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.776,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.779,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.779,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3433 us [ns_server:debug,2014-08-19T16:52:12.781,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.781,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{515, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.820,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:52:12.821,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.822,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1575 us [ns_server:debug,2014-08-19T16:52:12.822,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.822,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{522, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.864,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.868,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.868,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4495 us [ns_server:debug,2014-08-19T16:52:12.869,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.870,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{520, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.901,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.905,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.905,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3523 us [ns_server:debug,2014-08-19T16:52:12.906,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.906,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{525, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.952,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.953,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.954,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{531, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.955,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2872 us [ns_server:debug,2014-08-19T16:52:12.955,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.991,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:12.994,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.995,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3227 us [ns_server:debug,2014-08-19T16:52:12.995,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:12.996,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{524, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:12.997,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:debug,2014-08-19T16:52:12.998,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:compact_next_bucket:1453]Going to spawn bucket compaction with forced view compaction for bucket default [ns_server:debug,2014-08-19T16:52:12.998,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:compact_next_bucket:1482]Spawned 'uninhibited' compaction for default [ns_server:info,2014-08-19T16:52:13.000,ns_1@10.242.238.90:<0.32060.0>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning up indexes for bucket `default` 
[ns_server:info,2014-08-19T16:52:13.001,ns_1@10.242.238.90:<0.32060.0>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [forced_previously_inhibited_view_compaction, {database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:52:13.005,ns_1@10.242.238.90:<0.32063.0>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 59492, disk size is 7032040 [ns_server:debug,2014-08-19T16:52:13.005,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:52:13.006,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 29s [ns_server:info,2014-08-19T16:52:15.133,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.89' [ns_server:info,2014-08-19T16:52:15.627,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 170 state to replica [ns_server:info,2014-08-19T16:52:15.631,ns_1@10.242.238.90:<0.32089.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 170 to state replica [ns_server:debug,2014-08-19T16:52:15.654,ns_1@10.242.238.90:<0.32089.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_170_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:15.656,ns_1@10.242.238.90:<0.32089.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"ª"}, {checkpoints,[{170,0}]}, {name,<<"replication_building_170_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"ª"}, {takeover,false}, {suffix,"building_170_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",170,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:15.657,ns_1@10.242.238.90:<0.32089.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32090.0> [rebalance:debug,2014-08-19T16:52:15.657,ns_1@10.242.238.90:<0.32089.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:15.657,ns_1@10.242.238.90:<0.32089.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6196.2>,#Ref<16550.0.2.179365>}]} [rebalance:info,2014-08-19T16:52:15.658,ns_1@10.242.238.90:<0.32089.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 170 [rebalance:debug,2014-08-19T16:52:15.658,ns_1@10.242.238.90:<0.32089.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6196.2>,#Ref<16550.0.2.179365>}] [ns_server:debug,2014-08-19T16:52:15.659,ns_1@10.242.238.90:<0.32089.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:15.660,ns_1@10.242.238.90:<0.32091.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 170 [ns_server:info,2014-08-19T16:52:15.662,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 169 state to replica [ns_server:info,2014-08-19T16:52:15.666,ns_1@10.242.238.90:<0.32094.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 169 to state replica [ns_server:debug,2014-08-19T16:52:15.685,ns_1@10.242.238.90:<0.32094.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: 
replication_building_169_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:15.687,ns_1@10.242.238.90:<0.32094.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"©"}, {checkpoints,[{169,0}]}, {name,<<"replication_building_169_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"©"}, {takeover,false}, {suffix,"building_169_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",169,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:15.687,ns_1@10.242.238.90:<0.32094.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32095.0> [rebalance:debug,2014-08-19T16:52:15.687,ns_1@10.242.238.90:<0.32094.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:15.688,ns_1@10.242.238.90:<0.32094.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6212.2>,#Ref<16550.0.2.179448>}]} [rebalance:info,2014-08-19T16:52:15.688,ns_1@10.242.238.90:<0.32094.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 169 [rebalance:debug,2014-08-19T16:52:15.688,ns_1@10.242.238.90:<0.32094.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6212.2>,#Ref<16550.0.2.179448>}] [ns_server:debug,2014-08-19T16:52:15.689,ns_1@10.242.238.90:<0.32094.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:15.690,ns_1@10.242.238.90:<0.32096.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 169 [ns_server:info,2014-08-19T16:52:15.692,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 168 state to replica [ns_server:info,2014-08-19T16:52:15.695,ns_1@10.242.238.90:<0.32099.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 168 to state replica [ns_server:debug,2014-08-19T16:52:15.714,ns_1@10.242.238.90:<0.32099.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_168_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:15.715,ns_1@10.242.238.90:<0.32099.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"¨"}, {checkpoints,[{168,0}]}, {name,<<"replication_building_168_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"¨"}, {takeover,false}, {suffix,"building_168_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",168,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:15.716,ns_1@10.242.238.90:<0.32099.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32100.0> [rebalance:debug,2014-08-19T16:52:15.716,ns_1@10.242.238.90:<0.32099.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:15.716,ns_1@10.242.238.90:<0.32099.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6228.2>,#Ref<16550.0.2.179532>}]} [rebalance:info,2014-08-19T16:52:15.716,ns_1@10.242.238.90:<0.32099.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 168 [rebalance:debug,2014-08-19T16:52:15.717,ns_1@10.242.238.90:<0.32099.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to 
[{<16550.6228.2>,#Ref<16550.0.2.179532>}] [ns_server:debug,2014-08-19T16:52:15.718,ns_1@10.242.238.90:<0.32099.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:15.718,ns_1@10.242.238.90:<0.32101.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 168 [ns_server:info,2014-08-19T16:52:15.720,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 167 state to replica [ns_server:info,2014-08-19T16:52:15.724,ns_1@10.242.238.90:<0.32117.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 167 to state replica [ns_server:debug,2014-08-19T16:52:15.743,ns_1@10.242.238.90:<0.32117.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_167_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:15.744,ns_1@10.242.238.90:<0.32117.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"§"}, {checkpoints,[{167,0}]}, {name,<<"replication_building_167_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"§"}, {takeover,false}, {suffix,"building_167_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",167,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:15.745,ns_1@10.242.238.90:<0.32117.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32119.0> [rebalance:debug,2014-08-19T16:52:15.745,ns_1@10.242.238.90:<0.32117.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:15.746,ns_1@10.242.238.90:<0.32117.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6244.2>,#Ref<16550.0.2.179608>}]} [rebalance:info,2014-08-19T16:52:15.746,ns_1@10.242.238.90:<0.32117.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 167 [rebalance:debug,2014-08-19T16:52:15.746,ns_1@10.242.238.90:<0.32117.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6244.2>,#Ref<16550.0.2.179608>}] [ns_server:debug,2014-08-19T16:52:15.747,ns_1@10.242.238.90:<0.32117.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:15.748,ns_1@10.242.238.90:<0.32120.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 167 [ns_server:info,2014-08-19T16:52:15.749,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 166 state to replica [ns_server:info,2014-08-19T16:52:15.753,ns_1@10.242.238.90:<0.32123.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 166 to state replica [ns_server:debug,2014-08-19T16:52:15.772,ns_1@10.242.238.90:<0.32123.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_166_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:15.773,ns_1@10.242.238.90:<0.32123.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"¦"}, {checkpoints,[{166,0}]}, {name,<<"replication_building_166_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"¦"}, {takeover,false}, {suffix,"building_166_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",166,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} 
[rebalance:debug,2014-08-19T16:52:15.774,ns_1@10.242.238.90:<0.32123.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32124.0> [rebalance:debug,2014-08-19T16:52:15.774,ns_1@10.242.238.90:<0.32123.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:15.774,ns_1@10.242.238.90:<0.32123.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6260.2>,#Ref<16550.0.2.179681>}]} [rebalance:info,2014-08-19T16:52:15.775,ns_1@10.242.238.90:<0.32123.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 166 [rebalance:debug,2014-08-19T16:52:15.775,ns_1@10.242.238.90:<0.32123.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6260.2>,#Ref<16550.0.2.179681>}] [ns_server:debug,2014-08-19T16:52:15.776,ns_1@10.242.238.90:<0.32123.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:15.776,ns_1@10.242.238.90:<0.32125.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 166 [ns_server:info,2014-08-19T16:52:15.778,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 165 state to replica [ns_server:info,2014-08-19T16:52:15.782,ns_1@10.242.238.90:<0.32128.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 165 to state replica [ns_server:debug,2014-08-19T16:52:15.800,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 170. Nacking mccouch update. [views:debug,2014-08-19T16:52:15.801,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/170. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:15.801,ns_1@10.242.238.90:<0.32128.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_165_'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:52:15.801,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",170,replica,0} [rebalance:info,2014-08-19T16:52:15.802,ns_1@10.242.238.90:<0.32128.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"¥"}, {checkpoints,[{165,0}]}, {name,<<"replication_building_165_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"¥"}, {takeover,false}, {suffix,"building_165_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",165,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [ns_server:debug,2014-08-19T16:52:15.802,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,967,720,656,592,528,409, 345,954,707,643,579,515,396,1018,941,758,694,630,566,383,1005,992,745,681, 617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408,344,953,706, 642,578,514,395,1017,940,757,693,629,565,382,1004,991,744,680,616,552,369, 978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641,577,513, 394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 
602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,512,393,1015,938, 755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354, 963,716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,975,728,664,600,536,417,353,170,962,715,651,587, 523,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676, 612,548,365,974,727,663,599,535,416,352,961,714,650,586,522,403,948,765,701, 637,573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662, 598,534,415,351,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751, 687,623,559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,959,712, 648,584,520,401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609, 426,971,660,532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425, 970,659,531,348,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969, 658,530,347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657, 529,346,708,580,397,1019,942,759,631,993,682,554,371,733,605,422] [rebalance:debug,2014-08-19T16:52:15.803,ns_1@10.242.238.90:<0.32128.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32129.0> [rebalance:debug,2014-08-19T16:52:15.803,ns_1@10.242.238.90:<0.32128.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:15.804,ns_1@10.242.238.90:<0.32128.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6276.2>,#Ref<16550.0.2.179765>}]} [rebalance:info,2014-08-19T16:52:15.804,ns_1@10.242.238.90:<0.32128.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 165 [rebalance:debug,2014-08-19T16:52:15.805,ns_1@10.242.238.90:<0.32128.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6276.2>,#Ref<16550.0.2.179765>}] [ns_server:debug,2014-08-19T16:52:15.806,ns_1@10.242.238.90:<0.32128.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:15.806,ns_1@10.242.238.90:<0.32130.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 165 [ns_server:info,2014-08-19T16:52:15.808,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 164 state to replica [ns_server:info,2014-08-19T16:52:15.812,ns_1@10.242.238.90:<0.32133.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 164 to state replica [ns_server:debug,2014-08-19T16:52:15.830,ns_1@10.242.238.90:<0.32133.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_164_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:15.831,ns_1@10.242.238.90:<0.32133.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"¤"}, {checkpoints,[{164,0}]}, {name,<<"replication_building_164_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"¤"}, {takeover,false}, {suffix,"building_164_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",164,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:15.832,ns_1@10.242.238.90:<0.32133.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32134.0> [rebalance:debug,2014-08-19T16:52:15.833,ns_1@10.242.238.90:<0.32133.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:52:15.833,ns_1@10.242.238.90:<0.32133.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6292.2>,#Ref<16550.0.2.179838>}]} [rebalance:info,2014-08-19T16:52:15.833,ns_1@10.242.238.90:<0.32133.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 164 [rebalance:debug,2014-08-19T16:52:15.833,ns_1@10.242.238.90:<0.32133.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6292.2>,#Ref<16550.0.2.179838>}] [ns_server:debug,2014-08-19T16:52:15.835,ns_1@10.242.238.90:<0.32133.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:15.835,ns_1@10.242.238.90:<0.32135.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 164 [ns_server:info,2014-08-19T16:52:15.838,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 163 state to replica [ns_server:info,2014-08-19T16:52:15.842,ns_1@10.242.238.90:<0.32138.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 163 to state replica [ns_server:debug,2014-08-19T16:52:15.862,ns_1@10.242.238.90:<0.32138.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_163_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:15.863,ns_1@10.242.238.90:<0.32138.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"£"}, {checkpoints,[{163,0}]}, {name,<<"replication_building_163_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"£"}, {takeover,false}, {suffix,"building_163_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",163,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:15.864,ns_1@10.242.238.90:<0.32138.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32139.0> [rebalance:debug,2014-08-19T16:52:15.864,ns_1@10.242.238.90:<0.32138.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:15.865,ns_1@10.242.238.90:<0.32138.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6308.2>,#Ref<16550.0.2.179911>}]} [rebalance:info,2014-08-19T16:52:15.865,ns_1@10.242.238.90:<0.32138.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 163 [rebalance:debug,2014-08-19T16:52:15.865,ns_1@10.242.238.90:<0.32138.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6308.2>,#Ref<16550.0.2.179911>}] [ns_server:debug,2014-08-19T16:52:15.867,ns_1@10.242.238.90:<0.32138.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:15.867,ns_1@10.242.238.90:<0.32140.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 163 [views:debug,2014-08-19T16:52:15.868,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/170. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:15.868,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",170,replica,0} [ns_server:info,2014-08-19T16:52:15.869,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 162 state to replica [ns_server:info,2014-08-19T16:52:15.873,ns_1@10.242.238.90:<0.32143.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 162 to state replica [ns_server:debug,2014-08-19T16:52:15.892,ns_1@10.242.238.90:<0.32143.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_162_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:15.893,ns_1@10.242.238.90:<0.32143.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"¢"}, {checkpoints,[{162,0}]}, {name,<<"replication_building_162_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"¢"}, {takeover,false}, {suffix,"building_162_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",162,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:15.894,ns_1@10.242.238.90:<0.32143.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32144.0> [rebalance:debug,2014-08-19T16:52:15.894,ns_1@10.242.238.90:<0.32143.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:15.894,ns_1@10.242.238.90:<0.32143.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6324.2>,#Ref<16550.0.2.179994>}]} [rebalance:info,2014-08-19T16:52:15.895,ns_1@10.242.238.90:<0.32143.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 162 [rebalance:debug,2014-08-19T16:52:15.895,ns_1@10.242.238.90:<0.32143.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6324.2>,#Ref<16550.0.2.179994>}] [ns_server:debug,2014-08-19T16:52:15.896,ns_1@10.242.238.90:<0.32143.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:15.896,ns_1@10.242.238.90:<0.32145.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 162 [ns_server:info,2014-08-19T16:52:15.898,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 161 state to replica [ns_server:info,2014-08-19T16:52:15.902,ns_1@10.242.238.90:<0.32148.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 161 to state replica [ns_server:debug,2014-08-19T16:52:15.922,ns_1@10.242.238.90:<0.32148.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_161_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:15.924,ns_1@10.242.238.90:<0.32148.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"¡"}, {checkpoints,[{161,0}]}, {name,<<"replication_building_161_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"¡"}, {takeover,false}, {suffix,"building_161_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",161,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:15.924,ns_1@10.242.238.90:<0.32148.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32163.0> 
[rebalance:debug,2014-08-19T16:52:15.924,ns_1@10.242.238.90:<0.32148.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:15.925,ns_1@10.242.238.90:<0.32148.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6340.2>,#Ref<16550.0.2.180067>}]} [rebalance:info,2014-08-19T16:52:15.925,ns_1@10.242.238.90:<0.32148.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 161 [rebalance:debug,2014-08-19T16:52:15.925,ns_1@10.242.238.90:<0.32148.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6340.2>,#Ref<16550.0.2.180067>}] [ns_server:debug,2014-08-19T16:52:15.926,ns_1@10.242.238.90:<0.32148.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:15.927,ns_1@10.242.238.90:<0.32164.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 161 [ns_server:info,2014-08-19T16:52:15.928,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 160 state to replica [ns_server:info,2014-08-19T16:52:15.932,ns_1@10.242.238.90:<0.32167.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 160 to state replica [ns_server:debug,2014-08-19T16:52:15.951,ns_1@10.242.238.90:<0.32167.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_160_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:15.952,ns_1@10.242.238.90:<0.32167.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets," "}, {checkpoints,[{160,0}]}, {name,<<"replication_building_160_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets," "}, {takeover,false}, {suffix,"building_160_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",160,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:15.953,ns_1@10.242.238.90:<0.32167.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32168.0> [rebalance:debug,2014-08-19T16:52:15.953,ns_1@10.242.238.90:<0.32167.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:15.954,ns_1@10.242.238.90:<0.32167.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6356.2>,#Ref<16550.0.2.180151>}]} [rebalance:info,2014-08-19T16:52:15.954,ns_1@10.242.238.90:<0.32167.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 160 [rebalance:debug,2014-08-19T16:52:15.954,ns_1@10.242.238.90:<0.32167.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6356.2>,#Ref<16550.0.2.180151>}] [ns_server:debug,2014-08-19T16:52:15.955,ns_1@10.242.238.90:<0.32167.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:15.956,ns_1@10.242.238.90:<0.32169.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 160 [ns_server:info,2014-08-19T16:52:15.958,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 159 state to replica [ns_server:info,2014-08-19T16:52:15.963,ns_1@10.242.238.90:<0.32172.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 159 to state replica 
[ns_server:debug,2014-08-19T16:52:15.968,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 169. Nacking mccouch update. [views:debug,2014-08-19T16:52:15.968,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/169. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:15.969,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",169,replica,0} [ns_server:debug,2014-08-19T16:52:15.969,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,967,720,656,592,528,409, 345,954,707,643,579,515,396,1018,941,758,694,630,566,383,1005,992,745,681, 617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408,344,953,706, 642,578,514,395,1017,940,757,693,629,565,382,1004,991,744,680,616,552,369, 978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641,577,513, 394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,512,393,1015,938, 755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354, 963,716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,975,728,664,600,536,417,353,170,962,715,651,587, 523,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676, 612,548,365,974,727,663,599,535,416,352,169,961,714,650,586,522,403,948,765, 701,637,573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726, 662,598,534,415,351,960,713,649,585,521,402,947,764,700,636,572,389,1011,998, 751,687,623,559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,959, 712,648,584,520,401,1023,946,763,699,635,571,388,1010,997,686,558,375,737, 609,426,971,660,532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608, 425,970,659,531,348,710,582,399,1021,944,761,633,995,684,556,373,735,607,424, 969,658,530,347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968, 657,529,346,708,580,397,1019,942,759,631,993,682,554,371,733,605,422] [ns_server:debug,2014-08-19T16:52:15.983,ns_1@10.242.238.90:<0.32172.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_159_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:15.984,ns_1@10.242.238.90:<0.32172.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[159]}, {checkpoints,[{159,0}]}, {name,<<"replication_building_159_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[159]}, {takeover,false}, {suffix,"building_159_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",159,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:15.985,ns_1@10.242.238.90:<0.32172.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32173.0> [rebalance:debug,2014-08-19T16:52:15.985,ns_1@10.242.238.90:<0.32172.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:52:15.985,ns_1@10.242.238.90:<0.32172.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6372.2>,#Ref<16550.0.2.180234>}]} [rebalance:info,2014-08-19T16:52:15.986,ns_1@10.242.238.90:<0.32172.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 159 [rebalance:debug,2014-08-19T16:52:15.986,ns_1@10.242.238.90:<0.32172.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6372.2>,#Ref<16550.0.2.180234>}] [ns_server:debug,2014-08-19T16:52:15.987,ns_1@10.242.238.90:<0.32172.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:15.988,ns_1@10.242.238.90:<0.32174.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 159 [ns_server:info,2014-08-19T16:52:15.989,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 158 state to replica [ns_server:info,2014-08-19T16:52:15.993,ns_1@10.242.238.90:<0.32177.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 158 to state replica [ns_server:debug,2014-08-19T16:52:16.000,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:16.003,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:16.004,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3399 us [ns_server:debug,2014-08-19T16:52:16.005,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{171, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:16.005,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:16.021,ns_1@10.242.238.90:<0.32177.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_158_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.022,ns_1@10.242.238.90:<0.32177.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[158]}, {checkpoints,[{158,0}]}, {name,<<"replication_building_158_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[158]}, {takeover,false}, {suffix,"building_158_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",158,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.023,ns_1@10.242.238.90:<0.32177.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32179.0> [rebalance:debug,2014-08-19T16:52:16.023,ns_1@10.242.238.90:<0.32177.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
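The "config change: buckets" entry above shows the "default" bucket definition being synchronized: 1024 vbuckets, num_replicas 1, four servers, flush enabled, and a ram_quota of 13369344000 bytes (exactly 12750 MiB), with the SASL password masked. The map rows appear to be printed as {VBucket, OldChain, NewChain} triples; the single row shown, {171, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, reads as vbucket 171 keeping its active copy on .88 and gaining a replica on .91. Under that assumption, here is a small sketch (not ns_server code; Erlang's 'undefined' modelled as None) that lists which nodes gain or lose a copy per row:

# Assumed helper: given (vbucket, old_chain, new_chain) rows as printed in the
# "config change: buckets" map above, list the nodes that gain or lose a copy.
def chain_delta(rows):
    for vb, old, new in rows:
        gained = [n for n in new if n not in old and n is not None]
        lost = [n for n in old if n not in new and n is not None]
        yield vb, gained, lost

# The single row visible above:
rows = [(171, ['ns_1@10.242.238.88', None],
              ['ns_1@10.242.238.88', 'ns_1@10.242.238.91'])]
print(list(chain_delta(rows)))
# -> [(171, ['ns_1@10.242.238.91'], [])]   i.e. .91 gains a copy of vbucket 171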
[rebalance:debug,2014-08-19T16:52:16.023,ns_1@10.242.238.90:<0.32177.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6392.2>,#Ref<16550.0.2.180353>}]} [rebalance:info,2014-08-19T16:52:16.023,ns_1@10.242.238.90:<0.32177.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 158 [rebalance:debug,2014-08-19T16:52:16.024,ns_1@10.242.238.90:<0.32177.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6392.2>,#Ref<16550.0.2.180353>}] [ns_server:debug,2014-08-19T16:52:16.025,ns_1@10.242.238.90:<0.32177.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.026,ns_1@10.242.238.90:<0.32180.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 158 [ns_server:info,2014-08-19T16:52:16.027,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 157 state to replica [ns_server:info,2014-08-19T16:52:16.031,ns_1@10.242.238.90:<0.32183.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 157 to state replica [views:debug,2014-08-19T16:52:16.035,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/169. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:16.035,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",169,replica,0} [ns_server:debug,2014-08-19T16:52:16.049,ns_1@10.242.238.90:<0.32183.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_157_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.051,ns_1@10.242.238.90:<0.32183.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[157]}, {checkpoints,[{157,0}]}, {name,<<"replication_building_157_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[157]}, {takeover,false}, {suffix,"building_157_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",157,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.051,ns_1@10.242.238.90:<0.32183.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32184.0> [rebalance:debug,2014-08-19T16:52:16.051,ns_1@10.242.238.90:<0.32183.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.052,ns_1@10.242.238.90:<0.32183.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6417.2>,#Ref<16550.0.2.180493>}]} [rebalance:info,2014-08-19T16:52:16.052,ns_1@10.242.238.90:<0.32183.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 157 [rebalance:debug,2014-08-19T16:52:16.052,ns_1@10.242.238.90:<0.32183.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6417.2>,#Ref<16550.0.2.180493>}] [ns_server:debug,2014-08-19T16:52:16.053,ns_1@10.242.238.90:<0.32183.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.056,ns_1@10.242.238.90:<0.32185.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 157 [ns_server:info,2014-08-19T16:52:16.058,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 156 state to replica 
[ns_server:info,2014-08-19T16:52:16.062,ns_1@10.242.238.90:<0.32188.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 156 to state replica [ns_server:debug,2014-08-19T16:52:16.087,ns_1@10.242.238.90:<0.32188.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_156_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.088,ns_1@10.242.238.90:<0.32188.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[156]}, {checkpoints,[{156,0}]}, {name,<<"replication_building_156_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[156]}, {takeover,false}, {suffix,"building_156_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",156,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.089,ns_1@10.242.238.90:<0.32188.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32203.0> [rebalance:debug,2014-08-19T16:52:16.089,ns_1@10.242.238.90:<0.32188.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.090,ns_1@10.242.238.90:<0.32188.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6433.2>,#Ref<16550.0.2.180586>}]} [rebalance:info,2014-08-19T16:52:16.090,ns_1@10.242.238.90:<0.32188.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 156 [rebalance:debug,2014-08-19T16:52:16.090,ns_1@10.242.238.90:<0.32188.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6433.2>,#Ref<16550.0.2.180586>}] [ns_server:debug,2014-08-19T16:52:16.091,ns_1@10.242.238.90:<0.32188.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.092,ns_1@10.242.238.90:<0.32204.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 156 [ns_server:info,2014-08-19T16:52:16.093,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 155 state to replica [ns_server:info,2014-08-19T16:52:16.099,ns_1@10.242.238.90:<0.32207.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 155 to state replica [ns_server:debug,2014-08-19T16:52:16.118,ns_1@10.242.238.90:<0.32207.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_155_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.120,ns_1@10.242.238.90:<0.32207.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[155]}, {checkpoints,[{155,0}]}, {name,<<"replication_building_155_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[155]}, {takeover,false}, {suffix,"building_155_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",155,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.120,ns_1@10.242.238.90:<0.32207.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32208.0> [rebalance:debug,2014-08-19T16:52:16.120,ns_1@10.242.238.90:<0.32207.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.121,ns_1@10.242.238.90:<0.32207.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6449.2>,#Ref<16550.0.2.180683>}]} 
[rebalance:info,2014-08-19T16:52:16.121,ns_1@10.242.238.90:<0.32207.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 155 [rebalance:debug,2014-08-19T16:52:16.121,ns_1@10.242.238.90:<0.32207.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6449.2>,#Ref<16550.0.2.180683>}] [ns_server:debug,2014-08-19T16:52:16.122,ns_1@10.242.238.90:<0.32207.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.123,ns_1@10.242.238.90:<0.32209.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 155 [ns_server:info,2014-08-19T16:52:16.125,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 154 state to replica [ns_server:info,2014-08-19T16:52:16.129,ns_1@10.242.238.90:<0.32212.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 154 to state replica [ns_server:debug,2014-08-19T16:52:16.147,ns_1@10.242.238.90:<0.32212.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_154_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.149,ns_1@10.242.238.90:<0.32212.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[154]}, {checkpoints,[{154,0}]}, {name,<<"replication_building_154_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[154]}, {takeover,false}, {suffix,"building_154_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",154,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.149,ns_1@10.242.238.90:<0.32212.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32213.0> [rebalance:debug,2014-08-19T16:52:16.150,ns_1@10.242.238.90:<0.32212.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.150,ns_1@10.242.238.90:<0.32212.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6465.2>,#Ref<16550.0.2.180766>}]} [rebalance:info,2014-08-19T16:52:16.150,ns_1@10.242.238.90:<0.32212.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 154 [rebalance:debug,2014-08-19T16:52:16.150,ns_1@10.242.238.90:<0.32212.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6465.2>,#Ref<16550.0.2.180766>}] [ns_server:debug,2014-08-19T16:52:16.151,ns_1@10.242.238.90:<0.32212.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.152,ns_1@10.242.238.90:<0.32214.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 154 [ns_server:info,2014-08-19T16:52:16.154,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 153 state to replica [ns_server:debug,2014-08-19T16:52:16.155,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 167. Nacking mccouch update. [views:debug,2014-08-19T16:52:16.155,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/167. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:16.155,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",167,replica,0} [ns_server:debug,2014-08-19T16:52:16.157,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,967,720,656,592,528,409, 345,954,707,643,579,515,396,1018,941,758,694,630,566,383,1005,992,745,681, 617,553,370,979,732,668,604,540,421,357,966,719,655,591,527,408,344,953,706, 642,578,514,395,1017,940,757,693,629,565,382,1004,991,744,680,616,552,369, 978,731,667,603,539,420,356,965,718,654,590,526,407,343,952,705,641,577,513, 394,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,951,704,640,576,512,393,1015,938, 755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601,537,418,354, 963,716,652,588,524,405,950,767,703,639,575,392,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,975,728,664,600,536,417,353,170,962,715,651,587, 523,404,949,766,702,638,574,391,1013,753,689,625,561,378,1000,987,740,676, 612,548,365,974,727,663,599,535,416,352,169,961,714,650,586,522,403,948,765, 701,637,573,390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726, 662,598,534,415,351,960,713,649,585,521,402,947,764,700,636,572,389,1011,998, 751,687,623,559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,167, 959,712,648,584,520,401,1023,946,763,699,635,571,388,1010,997,686,558,375, 737,609,426,971,660,532,349,711,583,400,1022,945,762,634,996,685,557,374,736, 608,425,970,659,531,348,710,582,399,1021,944,761,633,995,684,556,373,735,607, 424,969,658,530,347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423, 968,657,529,346,708,580,397,1019,942,759,631,993,682,554,371,733,605,422] [ns_server:info,2014-08-19T16:52:16.159,ns_1@10.242.238.90:<0.32217.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 153 to state replica [ns_server:debug,2014-08-19T16:52:16.178,ns_1@10.242.238.90:<0.32217.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_153_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.179,ns_1@10.242.238.90:<0.32217.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[153]}, {checkpoints,[{153,0}]}, {name,<<"replication_building_153_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[153]}, {takeover,false}, {suffix,"building_153_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",153,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.180,ns_1@10.242.238.90:<0.32217.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32218.0> [rebalance:debug,2014-08-19T16:52:16.180,ns_1@10.242.238.90:<0.32217.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.181,ns_1@10.242.238.90:<0.32217.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6481.2>,#Ref<16550.0.2.180849>}]} 
[rebalance:info,2014-08-19T16:52:16.181,ns_1@10.242.238.90:<0.32217.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 153 [rebalance:debug,2014-08-19T16:52:16.181,ns_1@10.242.238.90:<0.32217.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6481.2>,#Ref<16550.0.2.180849>}] [ns_server:debug,2014-08-19T16:52:16.182,ns_1@10.242.238.90:<0.32217.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.183,ns_1@10.242.238.90:<0.32219.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 153 [ns_server:info,2014-08-19T16:52:16.185,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 152 state to replica [views:debug,2014-08-19T16:52:16.188,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/167. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:16.188,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",167,replica,0} [ns_server:info,2014-08-19T16:52:16.189,ns_1@10.242.238.90:<0.32222.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 152 to state replica [ns_server:debug,2014-08-19T16:52:16.208,ns_1@10.242.238.90:<0.32222.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_152_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.210,ns_1@10.242.238.90:<0.32222.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[152]}, {checkpoints,[{152,0}]}, {name,<<"replication_building_152_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[152]}, {takeover,false}, {suffix,"building_152_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",152,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.211,ns_1@10.242.238.90:<0.32222.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32223.0> [rebalance:debug,2014-08-19T16:52:16.211,ns_1@10.242.238.90:<0.32222.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.211,ns_1@10.242.238.90:<0.32222.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6497.2>,#Ref<16550.0.2.180922>}]} [rebalance:info,2014-08-19T16:52:16.211,ns_1@10.242.238.90:<0.32222.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 152 [rebalance:debug,2014-08-19T16:52:16.212,ns_1@10.242.238.90:<0.32222.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6497.2>,#Ref<16550.0.2.180922>}] [ns_server:debug,2014-08-19T16:52:16.213,ns_1@10.242.238.90:<0.32222.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.213,ns_1@10.242.238.90:<0.32224.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 152 [ns_server:info,2014-08-19T16:52:16.215,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 151 state to replica [ns_server:info,2014-08-19T16:52:16.220,ns_1@10.242.238.90:<0.32227.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 151 to state replica 
[ns_server:debug,2014-08-19T16:52:16.240,ns_1@10.242.238.90:<0.32227.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_151_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.241,ns_1@10.242.238.90:<0.32227.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[151]}, {checkpoints,[{151,0}]}, {name,<<"replication_building_151_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[151]}, {takeover,false}, {suffix,"building_151_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",151,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.242,ns_1@10.242.238.90:<0.32227.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32228.0> [rebalance:debug,2014-08-19T16:52:16.242,ns_1@10.242.238.90:<0.32227.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.243,ns_1@10.242.238.90:<0.32227.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6513.2>,#Ref<16550.0.2.181005>}]} [rebalance:info,2014-08-19T16:52:16.243,ns_1@10.242.238.90:<0.32227.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 151 [rebalance:debug,2014-08-19T16:52:16.243,ns_1@10.242.238.90:<0.32227.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6513.2>,#Ref<16550.0.2.181005>}] [ns_server:debug,2014-08-19T16:52:16.244,ns_1@10.242.238.90:<0.32227.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.245,ns_1@10.242.238.90:<0.32229.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 151 [ns_server:info,2014-08-19T16:52:16.247,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 150 state to replica [ns_server:info,2014-08-19T16:52:16.251,ns_1@10.242.238.90:<0.32232.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 150 to state replica [ns_server:debug,2014-08-19T16:52:16.272,ns_1@10.242.238.90:<0.32232.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_150_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.273,ns_1@10.242.238.90:<0.32232.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[150]}, {checkpoints,[{150,0}]}, {name,<<"replication_building_150_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[150]}, {takeover,false}, {suffix,"building_150_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",150,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.274,ns_1@10.242.238.90:<0.32232.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32247.0> [rebalance:debug,2014-08-19T16:52:16.274,ns_1@10.242.238.90:<0.32232.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.275,ns_1@10.242.238.90:<0.32232.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6529.2>,#Ref<16550.0.2.181088>}]} [rebalance:info,2014-08-19T16:52:16.275,ns_1@10.242.238.90:<0.32232.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 150 
[rebalance:debug,2014-08-19T16:52:16.275,ns_1@10.242.238.90:<0.32232.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6529.2>,#Ref<16550.0.2.181088>}] [ns_server:debug,2014-08-19T16:52:16.276,ns_1@10.242.238.90:<0.32232.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.277,ns_1@10.242.238.90:<0.32248.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 150 [ns_server:info,2014-08-19T16:52:16.278,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 149 state to replica [ns_server:info,2014-08-19T16:52:16.282,ns_1@10.242.238.90:<0.32251.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 149 to state replica [ns_server:debug,2014-08-19T16:52:16.300,ns_1@10.242.238.90:<0.32251.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_149_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.302,ns_1@10.242.238.90:<0.32251.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[149]}, {checkpoints,[{149,0}]}, {name,<<"replication_building_149_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[149]}, {takeover,false}, {suffix,"building_149_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",149,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.302,ns_1@10.242.238.90:<0.32251.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32252.0> [rebalance:debug,2014-08-19T16:52:16.302,ns_1@10.242.238.90:<0.32251.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.303,ns_1@10.242.238.90:<0.32251.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6545.2>,#Ref<16550.0.2.181171>}]} [rebalance:info,2014-08-19T16:52:16.303,ns_1@10.242.238.90:<0.32251.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 149 [rebalance:debug,2014-08-19T16:52:16.303,ns_1@10.242.238.90:<0.32251.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6545.2>,#Ref<16550.0.2.181171>}] [ns_server:debug,2014-08-19T16:52:16.304,ns_1@10.242.238.90:<0.32251.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.305,ns_1@10.242.238.90:<0.32253.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 149 [ns_server:info,2014-08-19T16:52:16.306,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 148 state to replica [ns_server:info,2014-08-19T16:52:16.310,ns_1@10.242.238.90:<0.32256.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 148 to state replica [ns_server:debug,2014-08-19T16:52:16.329,ns_1@10.242.238.90:<0.32256.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_148_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.330,ns_1@10.242.238.90:<0.32256.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[148]}, {checkpoints,[{148,0}]}, {name,<<"replication_building_148_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[148]}, {takeover,false}, {suffix,"building_148_'ns_1@10.242.238.90'"}, 
{note_tap_stats,{replica_building,"default",148,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.331,ns_1@10.242.238.90:<0.32256.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32257.0> [rebalance:debug,2014-08-19T16:52:16.331,ns_1@10.242.238.90:<0.32256.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.332,ns_1@10.242.238.90:<0.32256.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6569.2>,#Ref<16550.0.2.181282>}]} [rebalance:info,2014-08-19T16:52:16.332,ns_1@10.242.238.90:<0.32256.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 148 [rebalance:debug,2014-08-19T16:52:16.332,ns_1@10.242.238.90:<0.32256.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6569.2>,#Ref<16550.0.2.181282>}] [ns_server:debug,2014-08-19T16:52:16.333,ns_1@10.242.238.90:<0.32256.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.334,ns_1@10.242.238.90:<0.32258.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 148 [ns_server:info,2014-08-19T16:52:16.336,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 147 state to replica [ns_server:debug,2014-08-19T16:52:16.338,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 165. Nacking mccouch update. [views:debug,2014-08-19T16:52:16.339,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/165. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:16.339,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",165,replica,0} [ns_server:info,2014-08-19T16:52:16.340,ns_1@10.242.238.90:<0.32261.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 147 to state replica [ns_server:debug,2014-08-19T16:52:16.340,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,720,592,409,954,707,643, 579,515,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979, 732,668,604,540,421,357,966,719,655,591,527,408,344,953,706,642,578,514,395, 1017,940,757,693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603, 539,420,356,965,718,654,590,526,407,343,952,705,641,577,513,394,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,368,977,730,666,602,538,419,355,964, 717,653,589,525,406,342,951,704,640,576,512,393,1015,938,755,691,627,563,380, 1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524, 405,950,767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613, 549,366,975,728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702, 638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727, 663,599,535,416,352,169,961,714,650,586,522,403,948,765,701,637,573,390,1012, 999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351, 960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559,376, 985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520, 401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660, 532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531, 348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530, 347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346, 708,580,397,1019,942,759,631,993,682,554,371,733,605,422,967,656,528,345] [ns_server:debug,2014-08-19T16:52:16.358,ns_1@10.242.238.90:<0.32261.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_147_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.360,ns_1@10.242.238.90:<0.32261.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[147]}, {checkpoints,[{147,0}]}, {name,<<"replication_building_147_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[147]}, {takeover,false}, {suffix,"building_147_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",147,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.360,ns_1@10.242.238.90:<0.32261.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32262.0> [rebalance:debug,2014-08-19T16:52:16.361,ns_1@10.242.238.90:<0.32261.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.361,ns_1@10.242.238.90:<0.32261.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6585.2>,#Ref<16550.0.2.181366>}]} 
[rebalance:info,2014-08-19T16:52:16.361,ns_1@10.242.238.90:<0.32261.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 147 [rebalance:debug,2014-08-19T16:52:16.361,ns_1@10.242.238.90:<0.32261.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6585.2>,#Ref<16550.0.2.181366>}] [ns_server:debug,2014-08-19T16:52:16.362,ns_1@10.242.238.90:<0.32261.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.363,ns_1@10.242.238.90:<0.32263.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 147 [ns_server:info,2014-08-19T16:52:16.365,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 146 state to replica [ns_server:info,2014-08-19T16:52:16.368,ns_1@10.242.238.90:<0.32266.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 146 to state replica [ns_server:debug,2014-08-19T16:52:16.388,ns_1@10.242.238.90:<0.32266.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_146_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.389,ns_1@10.242.238.90:<0.32266.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[146]}, {checkpoints,[{146,0}]}, {name,<<"replication_building_146_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[146]}, {takeover,false}, {suffix,"building_146_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",146,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.390,ns_1@10.242.238.90:<0.32266.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32267.0> [rebalance:debug,2014-08-19T16:52:16.390,ns_1@10.242.238.90:<0.32266.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.391,ns_1@10.242.238.90:<0.32266.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6603.2>,#Ref<16550.0.2.181480>}]} [rebalance:info,2014-08-19T16:52:16.391,ns_1@10.242.238.90:<0.32266.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 146 [rebalance:debug,2014-08-19T16:52:16.391,ns_1@10.242.238.90:<0.32266.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6603.2>,#Ref<16550.0.2.181480>}] [ns_server:debug,2014-08-19T16:52:16.392,ns_1@10.242.238.90:<0.32266.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.393,ns_1@10.242.238.90:<0.32268.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 146 [ns_server:info,2014-08-19T16:52:16.395,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 145 state to replica [ns_server:info,2014-08-19T16:52:16.400,ns_1@10.242.238.90:<0.32271.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 145 to state replica [views:debug,2014-08-19T16:52:16.406,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/165. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:16.406,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",165,replica,0} [ns_server:debug,2014-08-19T16:52:16.418,ns_1@10.242.238.90:<0.32271.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_145_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.419,ns_1@10.242.238.90:<0.32271.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[145]}, {checkpoints,[{145,0}]}, {name,<<"replication_building_145_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[145]}, {takeover,false}, {suffix,"building_145_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",145,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.420,ns_1@10.242.238.90:<0.32271.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32272.0> [rebalance:debug,2014-08-19T16:52:16.420,ns_1@10.242.238.90:<0.32271.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.421,ns_1@10.242.238.90:<0.32271.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6619.2>,#Ref<16550.0.2.181553>}]} [rebalance:info,2014-08-19T16:52:16.421,ns_1@10.242.238.90:<0.32271.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 145 [rebalance:debug,2014-08-19T16:52:16.421,ns_1@10.242.238.90:<0.32271.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6619.2>,#Ref<16550.0.2.181553>}] [ns_server:debug,2014-08-19T16:52:16.423,ns_1@10.242.238.90:<0.32271.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.423,ns_1@10.242.238.90:<0.32273.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 145 [ns_server:info,2014-08-19T16:52:16.425,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 144 state to replica [ns_server:info,2014-08-19T16:52:16.429,ns_1@10.242.238.90:<0.32276.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 144 to state replica [ns_server:debug,2014-08-19T16:52:16.448,ns_1@10.242.238.90:<0.32276.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_144_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.449,ns_1@10.242.238.90:<0.32276.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[144]}, {checkpoints,[{144,0}]}, {name,<<"replication_building_144_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[144]}, {takeover,false}, {suffix,"building_144_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",144,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.450,ns_1@10.242.238.90:<0.32276.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32277.0> [rebalance:debug,2014-08-19T16:52:16.450,ns_1@10.242.238.90:<0.32276.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.450,ns_1@10.242.238.90:<0.32276.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter 
{had_backfill,undefined,undefined,[{<16550.6635.2>,#Ref<16550.0.2.181636>}]} [rebalance:info,2014-08-19T16:52:16.451,ns_1@10.242.238.90:<0.32276.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 144 [rebalance:debug,2014-08-19T16:52:16.451,ns_1@10.242.238.90:<0.32276.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6635.2>,#Ref<16550.0.2.181636>}] [ns_server:debug,2014-08-19T16:52:16.452,ns_1@10.242.238.90:<0.32276.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.452,ns_1@10.242.238.90:<0.32278.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 144 [ns_server:info,2014-08-19T16:52:16.454,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 143 state to replica [ns_server:info,2014-08-19T16:52:16.458,ns_1@10.242.238.90:<0.32281.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 143 to state replica [ns_server:debug,2014-08-19T16:52:16.479,ns_1@10.242.238.90:<0.32281.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_143_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.480,ns_1@10.242.238.90:<0.32281.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[143]}, {checkpoints,[{143,0}]}, {name,<<"replication_building_143_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[143]}, {takeover,false}, {suffix,"building_143_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",143,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.480,ns_1@10.242.238.90:<0.32281.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32295.0> [rebalance:debug,2014-08-19T16:52:16.480,ns_1@10.242.238.90:<0.32281.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.481,ns_1@10.242.238.90:<0.32281.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6651.2>,#Ref<16550.0.2.181711>}]} [rebalance:info,2014-08-19T16:52:16.481,ns_1@10.242.238.90:<0.32281.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 143 [rebalance:debug,2014-08-19T16:52:16.481,ns_1@10.242.238.90:<0.32281.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6651.2>,#Ref<16550.0.2.181711>}] [ns_server:debug,2014-08-19T16:52:16.482,ns_1@10.242.238.90:<0.32281.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.483,ns_1@10.242.238.90:<0.32297.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 143 [ns_server:info,2014-08-19T16:52:16.485,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 142 state to replica [ns_server:info,2014-08-19T16:52:16.489,ns_1@10.242.238.90:<0.32300.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 142 to state replica [ns_server:debug,2014-08-19T16:52:16.507,ns_1@10.242.238.90:<0.32300.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_142_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.509,ns_1@10.242.238.90:<0.32300.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[142]}, {checkpoints,[{142,0}]}, 
{name,<<"replication_building_142_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[142]}, {takeover,false}, {suffix,"building_142_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",142,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.509,ns_1@10.242.238.90:<0.32300.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32301.0> [rebalance:debug,2014-08-19T16:52:16.509,ns_1@10.242.238.90:<0.32300.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.510,ns_1@10.242.238.90:<0.32300.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6667.2>,#Ref<16550.0.2.181794>}]} [rebalance:info,2014-08-19T16:52:16.510,ns_1@10.242.238.90:<0.32300.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 142 [rebalance:debug,2014-08-19T16:52:16.510,ns_1@10.242.238.90:<0.32300.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6667.2>,#Ref<16550.0.2.181794>}] [ns_server:debug,2014-08-19T16:52:16.511,ns_1@10.242.238.90:<0.32300.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.512,ns_1@10.242.238.90:<0.32302.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 142 [ns_server:info,2014-08-19T16:52:16.513,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 141 state to replica [ns_server:info,2014-08-19T16:52:16.518,ns_1@10.242.238.90:<0.32305.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 141 to state replica [ns_server:debug,2014-08-19T16:52:16.537,ns_1@10.242.238.90:<0.32305.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_141_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.539,ns_1@10.242.238.90:<0.32305.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[141]}, {checkpoints,[{141,0}]}, {name,<<"replication_building_141_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[141]}, {takeover,false}, {suffix,"building_141_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",141,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.539,ns_1@10.242.238.90:<0.32305.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32306.0> [rebalance:debug,2014-08-19T16:52:16.539,ns_1@10.242.238.90:<0.32305.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.540,ns_1@10.242.238.90:<0.32305.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6683.2>,#Ref<16550.0.2.181879>}]} [rebalance:info,2014-08-19T16:52:16.540,ns_1@10.242.238.90:<0.32305.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 141 [rebalance:debug,2014-08-19T16:52:16.540,ns_1@10.242.238.90:<0.32305.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6683.2>,#Ref<16550.0.2.181879>}] [ns_server:debug,2014-08-19T16:52:16.541,ns_1@10.242.238.90:<0.32305.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[rebalance:debug,2014-08-19T16:52:16.542,ns_1@10.242.238.90:<0.32307.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 141 [ns_server:info,2014-08-19T16:52:16.544,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 140 state to replica [ns_server:debug,2014-08-19T16:52:16.548,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 163. Nacking mccouch update. [views:debug,2014-08-19T16:52:16.548,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/163. Updated state: replica (0) [ns_server:info,2014-08-19T16:52:16.548,ns_1@10.242.238.90:<0.32310.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 140 to state replica [ns_server:debug,2014-08-19T16:52:16.548,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",163,replica,0} [ns_server:debug,2014-08-19T16:52:16.549,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,720,592,409,954,707,643, 579,515,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979, 732,668,604,540,421,357,966,719,655,591,527,408,344,953,706,642,578,514,395, 1017,940,757,693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603, 539,420,356,965,718,654,590,526,407,343,952,705,641,577,513,394,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,368,977,730,666,602,538,419,355,964, 717,653,589,525,406,342,951,704,640,576,512,393,1015,938,755,691,627,563,380, 1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524, 405,950,767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613, 549,366,975,728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702, 638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727, 663,599,535,416,352,169,961,714,650,586,522,403,948,765,701,637,573,390,1012, 999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351, 960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559,376, 985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520, 401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660, 532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531, 348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530, 347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346, 163,708,580,397,1019,942,759,631,993,682,554,371,733,605,422,967,656,528,345] [ns_server:debug,2014-08-19T16:52:16.566,ns_1@10.242.238.90:<0.32310.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_140_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.568,ns_1@10.242.238.90:<0.32310.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[140]}, {checkpoints,[{140,0}]}, {name,<<"replication_building_140_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[140]}, {takeover,false}, {suffix,"building_140_'ns_1@10.242.238.90'"}, 
{note_tap_stats,{replica_building,"default",140,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.568,ns_1@10.242.238.90:<0.32310.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32311.0> [rebalance:debug,2014-08-19T16:52:16.569,ns_1@10.242.238.90:<0.32310.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.569,ns_1@10.242.238.90:<0.32310.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6700.2>,#Ref<16550.0.2.181958>}]} [rebalance:info,2014-08-19T16:52:16.569,ns_1@10.242.238.90:<0.32310.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 140 [rebalance:debug,2014-08-19T16:52:16.570,ns_1@10.242.238.90:<0.32310.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6700.2>,#Ref<16550.0.2.181958>}] [ns_server:debug,2014-08-19T16:52:16.571,ns_1@10.242.238.90:<0.32310.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.572,ns_1@10.242.238.90:<0.32312.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 140 [ns_server:info,2014-08-19T16:52:16.573,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 139 state to replica [ns_server:info,2014-08-19T16:52:16.577,ns_1@10.242.238.90:<0.32315.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 139 to state replica [views:debug,2014-08-19T16:52:16.581,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/163. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:16.582,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",163,replica,0} [ns_server:debug,2014-08-19T16:52:16.597,ns_1@10.242.238.90:<0.32315.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_139_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.598,ns_1@10.242.238.90:<0.32315.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[139]}, {checkpoints,[{139,0}]}, {name,<<"replication_building_139_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[139]}, {takeover,false}, {suffix,"building_139_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",139,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.599,ns_1@10.242.238.90:<0.32315.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32316.0> [rebalance:debug,2014-08-19T16:52:16.599,ns_1@10.242.238.90:<0.32315.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.599,ns_1@10.242.238.90:<0.32315.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6716.2>,#Ref<16550.0.2.182042>}]} [rebalance:info,2014-08-19T16:52:16.600,ns_1@10.242.238.90:<0.32315.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 139 [rebalance:debug,2014-08-19T16:52:16.600,ns_1@10.242.238.90:<0.32315.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6716.2>,#Ref<16550.0.2.182042>}] 
[ns_server:debug,2014-08-19T16:52:16.601,ns_1@10.242.238.90:<0.32315.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.602,ns_1@10.242.238.90:<0.32317.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 139 [ns_server:info,2014-08-19T16:52:16.603,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 138 state to replica [ns_server:info,2014-08-19T16:52:16.607,ns_1@10.242.238.90:<0.32320.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 138 to state replica [ns_server:debug,2014-08-19T16:52:16.626,ns_1@10.242.238.90:<0.32320.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_138_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.628,ns_1@10.242.238.90:<0.32320.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[138]}, {checkpoints,[{138,0}]}, {name,<<"replication_building_138_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[138]}, {takeover,false}, {suffix,"building_138_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",138,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.628,ns_1@10.242.238.90:<0.32320.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32321.0> [rebalance:debug,2014-08-19T16:52:16.629,ns_1@10.242.238.90:<0.32320.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.629,ns_1@10.242.238.90:<0.32320.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6732.2>,#Ref<16550.0.2.182117>}]} [rebalance:info,2014-08-19T16:52:16.629,ns_1@10.242.238.90:<0.32320.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 138 [rebalance:debug,2014-08-19T16:52:16.630,ns_1@10.242.238.90:<0.32320.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6732.2>,#Ref<16550.0.2.182117>}] [ns_server:debug,2014-08-19T16:52:16.631,ns_1@10.242.238.90:<0.32320.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.631,ns_1@10.242.238.90:<0.32322.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 138 [ns_server:info,2014-08-19T16:52:16.633,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 137 state to replica [ns_server:info,2014-08-19T16:52:16.637,ns_1@10.242.238.90:<0.32325.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 137 to state replica [ns_server:debug,2014-08-19T16:52:16.655,ns_1@10.242.238.90:<0.32325.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_137_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.656,ns_1@10.242.238.90:<0.32325.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[137]}, {checkpoints,[{137,0}]}, {name,<<"replication_building_137_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[137]}, {takeover,false}, {suffix,"building_137_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",137,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} 
[rebalance:debug,2014-08-19T16:52:16.657,ns_1@10.242.238.90:<0.32325.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32340.0> [rebalance:debug,2014-08-19T16:52:16.657,ns_1@10.242.238.90:<0.32325.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.658,ns_1@10.242.238.90:<0.32325.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6748.2>,#Ref<16550.0.2.182190>}]} [rebalance:info,2014-08-19T16:52:16.658,ns_1@10.242.238.90:<0.32325.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 137 [rebalance:debug,2014-08-19T16:52:16.658,ns_1@10.242.238.90:<0.32325.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6748.2>,#Ref<16550.0.2.182190>}] [ns_server:debug,2014-08-19T16:52:16.660,ns_1@10.242.238.90:<0.32325.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.660,ns_1@10.242.238.90:<0.32341.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 137 [ns_server:info,2014-08-19T16:52:16.662,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 136 state to replica [ns_server:info,2014-08-19T16:52:16.666,ns_1@10.242.238.90:<0.32344.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 136 to state replica [ns_server:debug,2014-08-19T16:52:16.684,ns_1@10.242.238.90:<0.32344.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_136_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.686,ns_1@10.242.238.90:<0.32344.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[136]}, {checkpoints,[{136,0}]}, {name,<<"replication_building_136_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[136]}, {takeover,false}, {suffix,"building_136_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",136,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.686,ns_1@10.242.238.90:<0.32344.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32345.0> [rebalance:debug,2014-08-19T16:52:16.686,ns_1@10.242.238.90:<0.32344.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.687,ns_1@10.242.238.90:<0.32344.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6764.2>,#Ref<16550.0.2.182273>}]} [rebalance:info,2014-08-19T16:52:16.687,ns_1@10.242.238.90:<0.32344.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 136 [rebalance:debug,2014-08-19T16:52:16.687,ns_1@10.242.238.90:<0.32344.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6764.2>,#Ref<16550.0.2.182273>}] [ns_server:debug,2014-08-19T16:52:16.688,ns_1@10.242.238.90:<0.32344.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.689,ns_1@10.242.238.90:<0.32346.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 136 [ns_server:info,2014-08-19T16:52:16.690,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 135 state to replica [ns_server:debug,2014-08-19T16:52:16.690,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid 
document into vb: 168. Nacking mccouch update. [views:debug,2014-08-19T16:52:16.690,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/168. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:16.691,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",168,replica,0} [ns_server:debug,2014-08-19T16:52:16.693,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,720,592,409,954,707,643, 579,515,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979, 732,668,604,540,421,357,966,719,655,591,527,408,344,953,706,642,578,514,395, 1017,940,757,693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603, 539,420,356,965,718,654,590,526,407,343,952,705,641,577,513,394,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,368,977,730,666,602,538,419,355,964, 717,653,589,525,406,342,951,704,640,576,512,393,1015,938,755,691,627,563,380, 1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524, 405,950,767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613, 549,366,975,728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702, 638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727, 663,599,535,416,352,169,961,714,650,586,522,403,948,765,701,637,573,390,1012, 999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351, 168,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584, 520,401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971, 660,532,349,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659, 531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658, 530,347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529, 346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605,422,967,656,528, 345] [ns_server:info,2014-08-19T16:52:16.695,ns_1@10.242.238.90:<0.32349.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 135 to state replica [ns_server:debug,2014-08-19T16:52:16.714,ns_1@10.242.238.90:<0.32349.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_135_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.715,ns_1@10.242.238.90:<0.32349.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[135]}, {checkpoints,[{135,0}]}, {name,<<"replication_building_135_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[135]}, {takeover,false}, {suffix,"building_135_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",135,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.715,ns_1@10.242.238.90:<0.32349.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32350.0> [rebalance:debug,2014-08-19T16:52:16.715,ns_1@10.242.238.90:<0.32349.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:52:16.716,ns_1@10.242.238.90:<0.32349.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6780.2>,#Ref<16550.0.2.182346>}]} [rebalance:info,2014-08-19T16:52:16.716,ns_1@10.242.238.90:<0.32349.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 135 [rebalance:debug,2014-08-19T16:52:16.716,ns_1@10.242.238.90:<0.32349.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6780.2>,#Ref<16550.0.2.182346>}] [ns_server:debug,2014-08-19T16:52:16.717,ns_1@10.242.238.90:<0.32349.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.718,ns_1@10.242.238.90:<0.32351.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 135 [ns_server:info,2014-08-19T16:52:16.720,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 134 state to replica [ns_server:info,2014-08-19T16:52:16.724,ns_1@10.242.238.90:<0.32354.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 134 to state replica [views:debug,2014-08-19T16:52:16.741,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/168. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:16.741,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",168,replica,0} [ns_server:debug,2014-08-19T16:52:16.742,ns_1@10.242.238.90:<0.32354.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_134_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.744,ns_1@10.242.238.90:<0.32354.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[134]}, {checkpoints,[{134,0}]}, {name,<<"replication_building_134_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[134]}, {takeover,false}, {suffix,"building_134_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",134,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.744,ns_1@10.242.238.90:<0.32354.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32355.0> [rebalance:debug,2014-08-19T16:52:16.745,ns_1@10.242.238.90:<0.32354.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.745,ns_1@10.242.238.90:<0.32354.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6796.2>,#Ref<16550.0.2.182419>}]} [rebalance:info,2014-08-19T16:52:16.745,ns_1@10.242.238.90:<0.32354.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 134 [rebalance:debug,2014-08-19T16:52:16.746,ns_1@10.242.238.90:<0.32354.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6796.2>,#Ref<16550.0.2.182419>}] [ns_server:debug,2014-08-19T16:52:16.747,ns_1@10.242.238.90:<0.32354.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.747,ns_1@10.242.238.90:<0.32356.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 134 [ns_server:info,2014-08-19T16:52:16.749,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 133 state to replica 
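Each "Starting tap stream" entry also echoes the connection tuple {{SourceHost,Port},{DestHost,Port},Options}: the source is always 10.242.238.88:11209 (the node holding the active copy) and the destination is this node, 10.242.238.90:11209, with {takeover,false} and {set_to_pending_state,false} because the stream only builds a replica. A sketch that recovers that pairing from the printed tuples; the regular expression is an assumption fitted to the text above, not a stable ns_server format:

    import re

    # Matches the printed {{"src",port}, {"dst",port}, [... {suffix,"building_<vb>_..."} ...]} tuple.
    STREAM_RX = re.compile(
        r'\{\{"(?P<src>[\d.]+)",(?P<sport>\d+)\},\s*'
        r'\{"(?P<dst>[\d.]+)",(?P<dport>\d+)\},.*?'
        r'\{suffix,"building_(?P<vb>\d+)_',
        re.S)

    def building_streams(log_text):
        """Yield (vbucket, "src:port", "dst:port") for each replica-building stream dump."""
        for m in STREAM_RX.finditer(log_text):
            yield int(m["vb"]), f'{m["src"]}:{m["sport"]}', f'{m["dst"]}:{m["dport"]}'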
[ns_server:info,2014-08-19T16:52:16.753,ns_1@10.242.238.90:<0.32359.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 133 to state replica [ns_server:debug,2014-08-19T16:52:16.772,ns_1@10.242.238.90:<0.32359.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_133_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.774,ns_1@10.242.238.90:<0.32359.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[133]}, {checkpoints,[{133,0}]}, {name,<<"replication_building_133_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[133]}, {takeover,false}, {suffix,"building_133_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",133,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.774,ns_1@10.242.238.90:<0.32359.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32360.0> [rebalance:debug,2014-08-19T16:52:16.775,ns_1@10.242.238.90:<0.32359.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.775,ns_1@10.242.238.90:<0.32359.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6812.2>,#Ref<16550.0.2.182503>}]} [rebalance:info,2014-08-19T16:52:16.775,ns_1@10.242.238.90:<0.32359.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 133 [rebalance:debug,2014-08-19T16:52:16.776,ns_1@10.242.238.90:<0.32359.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6812.2>,#Ref<16550.0.2.182503>}] [ns_server:debug,2014-08-19T16:52:16.776,ns_1@10.242.238.90:<0.32359.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.777,ns_1@10.242.238.90:<0.32361.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 133 [ns_server:info,2014-08-19T16:52:16.779,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 132 state to replica [ns_server:info,2014-08-19T16:52:16.782,ns_1@10.242.238.90:<0.32364.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 132 to state replica [ns_server:debug,2014-08-19T16:52:16.801,ns_1@10.242.238.90:<0.32364.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_132_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.802,ns_1@10.242.238.90:<0.32364.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[132]}, {checkpoints,[{132,0}]}, {name,<<"replication_building_132_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[132]}, {takeover,false}, {suffix,"building_132_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",132,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.803,ns_1@10.242.238.90:<0.32364.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32373.0> [rebalance:debug,2014-08-19T16:52:16.803,ns_1@10.242.238.90:<0.32364.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.803,ns_1@10.242.238.90:<0.32364.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6828.2>,#Ref<16550.0.2.182576>}]} 
[rebalance:info,2014-08-19T16:52:16.804,ns_1@10.242.238.90:<0.32364.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 132 [rebalance:debug,2014-08-19T16:52:16.804,ns_1@10.242.238.90:<0.32364.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6828.2>,#Ref<16550.0.2.182576>}] [ns_server:debug,2014-08-19T16:52:16.805,ns_1@10.242.238.90:<0.32364.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.805,ns_1@10.242.238.90:<0.32380.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 132 [ns_server:info,2014-08-19T16:52:16.807,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 131 state to replica [ns_server:info,2014-08-19T16:52:16.811,ns_1@10.242.238.90:<0.32383.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 131 to state replica [ns_server:debug,2014-08-19T16:52:16.830,ns_1@10.242.238.90:<0.32383.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_131_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.831,ns_1@10.242.238.90:<0.32383.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[131]}, {checkpoints,[{131,0}]}, {name,<<"replication_building_131_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[131]}, {takeover,false}, {suffix,"building_131_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",131,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.832,ns_1@10.242.238.90:<0.32383.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32384.0> [rebalance:debug,2014-08-19T16:52:16.832,ns_1@10.242.238.90:<0.32383.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.832,ns_1@10.242.238.90:<0.32383.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6844.2>,#Ref<16550.0.2.182659>}]} [rebalance:info,2014-08-19T16:52:16.832,ns_1@10.242.238.90:<0.32383.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 131 [rebalance:debug,2014-08-19T16:52:16.833,ns_1@10.242.238.90:<0.32383.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6844.2>,#Ref<16550.0.2.182659>}] [ns_server:debug,2014-08-19T16:52:16.834,ns_1@10.242.238.90:<0.32383.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.835,ns_1@10.242.238.90:<0.32385.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 131 [ns_server:info,2014-08-19T16:52:16.836,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 130 state to replica [ns_server:info,2014-08-19T16:52:16.840,ns_1@10.242.238.90:<0.32388.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 130 to state replica [ns_server:debug,2014-08-19T16:52:16.841,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 166. Nacking mccouch update. [views:debug,2014-08-19T16:52:16.841,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/166. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:16.842,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",166,replica,0} [ns_server:debug,2014-08-19T16:52:16.842,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,720,592,409,954,707,643, 579,515,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979, 732,668,604,540,421,357,966,719,655,591,527,408,344,953,706,642,578,514,395, 1017,940,757,693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603, 539,420,356,965,718,654,590,526,407,343,952,705,641,577,513,394,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,368,977,730,666,602,538,419,355,964, 717,653,589,525,406,342,951,704,640,576,512,393,1015,938,755,691,627,563,380, 1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524, 405,950,767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613, 549,366,975,728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702, 638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727, 663,599,535,416,352,169,961,714,650,586,522,403,948,765,701,637,573,390,1012, 999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351, 168,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584, 520,401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971, 660,532,349,166,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970, 659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969, 658,530,347,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657, 529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605,422,967,656, 528,345] [ns_server:debug,2014-08-19T16:52:16.859,ns_1@10.242.238.90:<0.32388.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_130_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.860,ns_1@10.242.238.90:<0.32388.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[130]}, {checkpoints,[{130,0}]}, {name,<<"replication_building_130_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[130]}, {takeover,false}, {suffix,"building_130_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",130,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.861,ns_1@10.242.238.90:<0.32388.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32389.0> [rebalance:debug,2014-08-19T16:52:16.861,ns_1@10.242.238.90:<0.32388.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.861,ns_1@10.242.238.90:<0.32388.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6860.2>,#Ref<16550.0.2.182732>}]} [rebalance:info,2014-08-19T16:52:16.861,ns_1@10.242.238.90:<0.32388.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 130 
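Whenever a _local/vbuuid document is added and the set_vbucket event is processed, capi_set_view_manager-default re-logs the complete "Usable vbuckets" list; the snapshot at 16:52:16.842 differs from the one at 16:52:16.693 only by vbucket 166, and the later snapshots add 164 and then 162 the same way. A small helper for diffing two such snapshots once they have been copied out of the log as Python lists (the variable names in the comment are illustrative only):

    def usable_vbucket_delta(previous, current):
        """Compare two 'Usable vbuckets' snapshots; the logged order is not sorted, so use sets."""
        prev, curr = set(previous), set(current)
        return sorted(curr - prev), sorted(prev - curr)   # (newly usable, no longer usable)

    # For the two snapshots above:
    # usable_vbucket_delta(snapshot_165216_693, snapshot_165216_842) == ([166], [])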
[rebalance:debug,2014-08-19T16:52:16.862,ns_1@10.242.238.90:<0.32388.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6860.2>,#Ref<16550.0.2.182732>}] [ns_server:debug,2014-08-19T16:52:16.863,ns_1@10.242.238.90:<0.32388.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.864,ns_1@10.242.238.90:<0.32390.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 130 [ns_server:info,2014-08-19T16:52:16.867,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 129 state to replica [ns_server:info,2014-08-19T16:52:16.871,ns_1@10.242.238.90:<0.32393.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 129 to state replica [views:debug,2014-08-19T16:52:16.886,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/166. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:16.887,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",166,replica,0} [ns_server:debug,2014-08-19T16:52:16.892,ns_1@10.242.238.90:<0.32393.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_129_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.893,ns_1@10.242.238.90:<0.32393.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[129]}, {checkpoints,[{129,0}]}, {name,<<"replication_building_129_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[129]}, {takeover,false}, {suffix,"building_129_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",129,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.894,ns_1@10.242.238.90:<0.32393.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32394.0> [rebalance:debug,2014-08-19T16:52:16.894,ns_1@10.242.238.90:<0.32393.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.894,ns_1@10.242.238.90:<0.32393.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6876.2>,#Ref<16550.0.2.182807>}]} [rebalance:info,2014-08-19T16:52:16.895,ns_1@10.242.238.90:<0.32393.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 129 [rebalance:debug,2014-08-19T16:52:16.895,ns_1@10.242.238.90:<0.32393.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6876.2>,#Ref<16550.0.2.182807>}] [ns_server:debug,2014-08-19T16:52:16.896,ns_1@10.242.238.90:<0.32393.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.896,ns_1@10.242.238.90:<0.32395.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 129 [ns_server:info,2014-08-19T16:52:16.898,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 128 state to replica [ns_server:info,2014-08-19T16:52:16.902,ns_1@10.242.238.90:<0.32398.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 128 to state replica [ns_server:debug,2014-08-19T16:52:16.922,ns_1@10.242.238.90:<0.32398.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_128_'ns_1@10.242.238.90' 
[rebalance:info,2014-08-19T16:52:16.924,ns_1@10.242.238.90:<0.32398.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[128]}, {checkpoints,[{128,0}]}, {name,<<"replication_building_128_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[128]}, {takeover,false}, {suffix,"building_128_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",128,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.924,ns_1@10.242.238.90:<0.32398.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32399.0> [rebalance:debug,2014-08-19T16:52:16.924,ns_1@10.242.238.90:<0.32398.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.925,ns_1@10.242.238.90:<0.32398.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6892.2>,#Ref<16550.0.2.182890>}]} [rebalance:info,2014-08-19T16:52:16.925,ns_1@10.242.238.90:<0.32398.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 128 [rebalance:debug,2014-08-19T16:52:16.925,ns_1@10.242.238.90:<0.32398.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6892.2>,#Ref<16550.0.2.182890>}] [ns_server:debug,2014-08-19T16:52:16.926,ns_1@10.242.238.90:<0.32398.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.927,ns_1@10.242.238.90:<0.32400.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 128 [ns_server:info,2014-08-19T16:52:16.929,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 127 state to replica [ns_server:info,2014-08-19T16:52:16.935,ns_1@10.242.238.90:<0.32403.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 127 to state replica [ns_server:debug,2014-08-19T16:52:16.960,ns_1@10.242.238.90:<0.32403.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_127_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.965,ns_1@10.242.238.90:<0.32403.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[127]}, {checkpoints,[{127,0}]}, {name,<<"replication_building_127_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,[127]}, {takeover,false}, {suffix,"building_127_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",127,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.966,ns_1@10.242.238.90:<0.32403.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32418.0> [rebalance:debug,2014-08-19T16:52:16.966,ns_1@10.242.238.90:<0.32403.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.966,ns_1@10.242.238.90:<0.32403.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6908.2>,#Ref<16550.0.2.182963>}]} [rebalance:info,2014-08-19T16:52:16.967,ns_1@10.242.238.90:<0.32403.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 127 [rebalance:debug,2014-08-19T16:52:16.967,ns_1@10.242.238.90:<0.32403.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6908.2>,#Ref<16550.0.2.182963>}] 
[ns_server:debug,2014-08-19T16:52:16.968,ns_1@10.242.238.90:<0.32403.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.969,ns_1@10.242.238.90:<0.32419.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 127 [ns_server:info,2014-08-19T16:52:16.970,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 126 state to replica [ns_server:info,2014-08-19T16:52:16.974,ns_1@10.242.238.90:<0.32422.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 126 to state replica [ns_server:debug,2014-08-19T16:52:16.993,ns_1@10.242.238.90:<0.32422.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_126_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:16.995,ns_1@10.242.238.90:<0.32422.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"~"}, {checkpoints,[{126,0}]}, {name,<<"replication_building_126_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"~"}, {takeover,false}, {suffix,"building_126_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",126,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:16.995,ns_1@10.242.238.90:<0.32422.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32423.0> [rebalance:debug,2014-08-19T16:52:16.995,ns_1@10.242.238.90:<0.32422.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:16.996,ns_1@10.242.238.90:<0.32422.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6924.2>,#Ref<16550.0.2.183046>}]} [rebalance:info,2014-08-19T16:52:16.996,ns_1@10.242.238.90:<0.32422.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 126 [rebalance:debug,2014-08-19T16:52:16.997,ns_1@10.242.238.90:<0.32422.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6924.2>,#Ref<16550.0.2.183046>}] [ns_server:debug,2014-08-19T16:52:16.998,ns_1@10.242.238.90:<0.32422.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:16.998,ns_1@10.242.238.90:<0.32424.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 126 [ns_server:info,2014-08-19T16:52:17.000,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 125 state to replica [ns_server:info,2014-08-19T16:52:17.004,ns_1@10.242.238.90:<0.32427.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 125 to state replica [ns_server:debug,2014-08-19T16:52:17.021,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 164. Nacking mccouch update. [views:debug,2014-08-19T16:52:17.021,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/164. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:17.021,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",164,replica,0} [ns_server:debug,2014-08-19T16:52:17.022,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,720,592,409,954,707,643, 579,515,396,1018,941,758,694,630,566,383,1005,992,745,681,617,553,370,979, 732,668,604,540,421,357,966,719,655,591,527,408,344,953,706,642,578,514,395, 1017,940,757,693,629,565,382,1004,991,744,680,616,552,369,978,731,667,603, 539,420,356,965,718,654,590,526,407,343,952,705,641,577,513,394,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,368,977,730,666,602,538,419,355,964, 717,653,589,525,406,342,951,704,640,576,512,393,1015,938,755,691,627,563,380, 1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524, 405,950,767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613, 549,366,975,728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702, 638,574,391,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727, 663,599,535,416,352,169,961,714,650,586,522,403,948,765,701,637,573,390,1012, 999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351, 168,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584, 520,401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971, 660,532,349,166,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970, 659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969, 658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968, 657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605,422,967, 656,528,345] [ns_server:debug,2014-08-19T16:52:17.023,ns_1@10.242.238.90:<0.32427.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_125_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.024,ns_1@10.242.238.90:<0.32427.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"}"}, {checkpoints,[{125,0}]}, {name,<<"replication_building_125_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"}"}, {takeover,false}, {suffix,"building_125_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",125,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.024,ns_1@10.242.238.90:<0.32427.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32428.0> [rebalance:debug,2014-08-19T16:52:17.025,ns_1@10.242.238.90:<0.32427.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.025,ns_1@10.242.238.90:<0.32427.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6940.2>,#Ref<16550.0.2.183119>}]} [rebalance:info,2014-08-19T16:52:17.025,ns_1@10.242.238.90:<0.32427.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 125 
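From vbucket 126 downward the {vbuckets,...} payload is printed as a quoted string rather than a list: Erlang's ~p formatting renders a list of integers as a string when every element is a printable character, so "~" is [126], "}" is [125], "|" is [124], and so on down to "r" for [114], while [127] and above keep the list form. The adjacent {checkpoints,[{125,0}]} tuples confirm the decoding. A one-liner to undo it when reading these dumps:

    def decode_vbuckets(term):
        """Decode a printed {vbuckets,...} payload: either a real list of ids
        or an Erlang 'string', i.e. a list of printable ASCII codes."""
        if isinstance(term, str):
            return [ord(ch) for ch in term]   # "~" -> [126], "}" -> [125], "r" -> [114]
        return list(term)

    assert decode_vbuckets("~") == [126]
    assert decode_vbuckets([140]) == [140]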
[rebalance:debug,2014-08-19T16:52:17.025,ns_1@10.242.238.90:<0.32427.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6940.2>,#Ref<16550.0.2.183119>}] [ns_server:debug,2014-08-19T16:52:17.027,ns_1@10.242.238.90:<0.32427.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.027,ns_1@10.242.238.90:<0.32429.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 125 [ns_server:info,2014-08-19T16:52:17.029,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 124 state to replica [ns_server:info,2014-08-19T16:52:17.033,ns_1@10.242.238.90:<0.32432.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 124 to state replica [ns_server:debug,2014-08-19T16:52:17.051,ns_1@10.242.238.90:<0.32432.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_124_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.053,ns_1@10.242.238.90:<0.32432.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"|"}, {checkpoints,[{124,0}]}, {name,<<"replication_building_124_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"|"}, {takeover,false}, {suffix,"building_124_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",124,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.053,ns_1@10.242.238.90:<0.32432.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32433.0> [rebalance:debug,2014-08-19T16:52:17.054,ns_1@10.242.238.90:<0.32432.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.054,ns_1@10.242.238.90:<0.32432.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6956.2>,#Ref<16550.0.2.183202>}]} [rebalance:info,2014-08-19T16:52:17.054,ns_1@10.242.238.90:<0.32432.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 124 [rebalance:debug,2014-08-19T16:52:17.055,ns_1@10.242.238.90:<0.32432.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6956.2>,#Ref<16550.0.2.183202>}] [ns_server:debug,2014-08-19T16:52:17.056,ns_1@10.242.238.90:<0.32432.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.056,ns_1@10.242.238.90:<0.32434.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 124 [ns_server:info,2014-08-19T16:52:17.058,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 123 state to replica [ns_server:info,2014-08-19T16:52:17.062,ns_1@10.242.238.90:<0.32437.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 123 to state replica [views:debug,2014-08-19T16:52:17.071,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/164. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:17.071,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",164,replica,0} [ns_server:debug,2014-08-19T16:52:17.088,ns_1@10.242.238.90:<0.32437.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_123_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.089,ns_1@10.242.238.90:<0.32437.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"{"}, {checkpoints,[{123,0}]}, {name,<<"replication_building_123_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"{"}, {takeover,false}, {suffix,"building_123_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",123,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.090,ns_1@10.242.238.90:<0.32437.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32438.0> [rebalance:debug,2014-08-19T16:52:17.090,ns_1@10.242.238.90:<0.32437.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.091,ns_1@10.242.238.90:<0.32437.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.6972.2>,#Ref<16550.0.2.183275>}]} [rebalance:info,2014-08-19T16:52:17.091,ns_1@10.242.238.90:<0.32437.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 123 [rebalance:debug,2014-08-19T16:52:17.091,ns_1@10.242.238.90:<0.32437.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6972.2>,#Ref<16550.0.2.183275>}] [ns_server:debug,2014-08-19T16:52:17.092,ns_1@10.242.238.90:<0.32437.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.093,ns_1@10.242.238.90:<0.32439.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 123 [ns_server:info,2014-08-19T16:52:17.095,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 122 state to replica [ns_server:info,2014-08-19T16:52:17.101,ns_1@10.242.238.90:<0.32442.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 122 to state replica [ns_server:debug,2014-08-19T16:52:17.119,ns_1@10.242.238.90:<0.32442.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_122_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.121,ns_1@10.242.238.90:<0.32442.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"z"}, {checkpoints,[{122,0}]}, {name,<<"replication_building_122_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"z"}, {takeover,false}, {suffix,"building_122_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",122,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.121,ns_1@10.242.238.90:<0.32442.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32443.0> [rebalance:debug,2014-08-19T16:52:17.121,ns_1@10.242.238.90:<0.32442.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.122,ns_1@10.242.238.90:<0.32442.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter 
{had_backfill,undefined,undefined,[{<16550.6993.2>,#Ref<16550.0.2.183376>}]} [rebalance:info,2014-08-19T16:52:17.122,ns_1@10.242.238.90:<0.32442.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 122 [rebalance:debug,2014-08-19T16:52:17.123,ns_1@10.242.238.90:<0.32442.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.6993.2>,#Ref<16550.0.2.183376>}] [ns_server:debug,2014-08-19T16:52:17.124,ns_1@10.242.238.90:<0.32442.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.124,ns_1@10.242.238.90:<0.32452.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 122 [ns_server:info,2014-08-19T16:52:17.127,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 121 state to replica [ns_server:info,2014-08-19T16:52:17.132,ns_1@10.242.238.90:<0.32461.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 121 to state replica [ns_server:debug,2014-08-19T16:52:17.151,ns_1@10.242.238.90:<0.32461.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_121_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.164,ns_1@10.242.238.90:<0.32461.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"y"}, {checkpoints,[{121,0}]}, {name,<<"replication_building_121_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"y"}, {takeover,false}, {suffix,"building_121_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",121,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.164,ns_1@10.242.238.90:<0.32461.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32462.0> [rebalance:debug,2014-08-19T16:52:17.165,ns_1@10.242.238.90:<0.32461.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.165,ns_1@10.242.238.90:<0.32461.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7009.2>,#Ref<16550.0.2.183451>}]} [rebalance:info,2014-08-19T16:52:17.165,ns_1@10.242.238.90:<0.32461.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 121 [rebalance:debug,2014-08-19T16:52:17.165,ns_1@10.242.238.90:<0.32461.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7009.2>,#Ref<16550.0.2.183451>}] [ns_server:debug,2014-08-19T16:52:17.167,ns_1@10.242.238.90:<0.32461.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.167,ns_1@10.242.238.90:<0.32463.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 121 [ns_server:info,2014-08-19T16:52:17.169,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 120 state to replica [ns_server:info,2014-08-19T16:52:17.173,ns_1@10.242.238.90:<0.32466.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 120 to state replica [ns_server:debug,2014-08-19T16:52:17.192,ns_1@10.242.238.90:<0.32466.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_120_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.194,ns_1@10.242.238.90:<0.32466.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"x"}, {checkpoints,[{120,0}]}, 
{name,<<"replication_building_120_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"x"}, {takeover,false}, {suffix,"building_120_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",120,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.195,ns_1@10.242.238.90:<0.32466.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32467.0> [rebalance:debug,2014-08-19T16:52:17.195,ns_1@10.242.238.90:<0.32466.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.195,ns_1@10.242.238.90:<0.32466.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7025.2>,#Ref<16550.0.2.183534>}]} [rebalance:info,2014-08-19T16:52:17.195,ns_1@10.242.238.90:<0.32466.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 120 [rebalance:debug,2014-08-19T16:52:17.196,ns_1@10.242.238.90:<0.32466.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7025.2>,#Ref<16550.0.2.183534>}] [ns_server:debug,2014-08-19T16:52:17.196,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 162. Nacking mccouch update. [views:debug,2014-08-19T16:52:17.196,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/162. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:17.196,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",162,replica,0} [ns_server:debug,2014-08-19T16:52:17.197,ns_1@10.242.238.90:<0.32466.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.197,ns_1@10.242.238.90:<0.32468.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 120 [ns_server:debug,2014-08-19T16:52:17.198,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,720,592,409,954,643,515, 941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421, 357,966,719,655,591,527,408,344,953,706,642,578,514,395,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654, 590,526,407,343,952,705,641,577,513,394,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525,406, 342,951,704,640,576,512,393,1015,938,755,691,627,563,380,1002,989,742,678, 614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,950,767,703, 639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975,728, 664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638,574,391,1013, 753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599,535,416,352, 169,961,714,650,586,522,403,948,765,701,637,573,390,1012,999,752,688,624,560, 377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960,713,649,585, 521,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738,674,610, 
546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,1023,946,763, 699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532,349,166,711,583, 400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348,165,710,582, 399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347,164,709,581, 398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346,163,708,580, 397,1019,942,759,631,993,682,554,371,733,605,422,967,656,528,345,162,707,579, 396,1018] [ns_server:info,2014-08-19T16:52:17.198,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 119 state to replica [ns_server:info,2014-08-19T16:52:17.204,ns_1@10.242.238.90:<0.32471.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 119 to state replica [ns_server:debug,2014-08-19T16:52:17.223,ns_1@10.242.238.90:<0.32471.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_119_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.224,ns_1@10.242.238.90:<0.32471.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"w"}, {checkpoints,[{119,0}]}, {name,<<"replication_building_119_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"w"}, {takeover,false}, {suffix,"building_119_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",119,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.225,ns_1@10.242.238.90:<0.32471.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32472.0> [rebalance:debug,2014-08-19T16:52:17.225,ns_1@10.242.238.90:<0.32471.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.225,ns_1@10.242.238.90:<0.32471.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7041.2>,#Ref<16550.0.2.183607>}]} [rebalance:info,2014-08-19T16:52:17.226,ns_1@10.242.238.90:<0.32471.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 119 [rebalance:debug,2014-08-19T16:52:17.226,ns_1@10.242.238.90:<0.32471.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7041.2>,#Ref<16550.0.2.183607>}] [ns_server:debug,2014-08-19T16:52:17.227,ns_1@10.242.238.90:<0.32471.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.228,ns_1@10.242.238.90:<0.32473.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 119 [ns_server:info,2014-08-19T16:52:17.229,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 118 state to replica [ns_server:info,2014-08-19T16:52:17.233,ns_1@10.242.238.90:<0.32476.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 118 to state replica [ns_server:debug,2014-08-19T16:52:17.252,ns_1@10.242.238.90:<0.32476.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_118_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.253,ns_1@10.242.238.90:<0.32476.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"v"}, {checkpoints,[{118,0}]}, {name,<<"replication_building_118_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"v"}, {takeover,false}, {suffix,"building_118_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",118,'ns_1@10.242.238.88', 
'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.254,ns_1@10.242.238.90:<0.32476.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32477.0> [rebalance:debug,2014-08-19T16:52:17.254,ns_1@10.242.238.90:<0.32476.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.254,ns_1@10.242.238.90:<0.32476.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7057.2>,#Ref<16550.0.2.183680>}]} [rebalance:info,2014-08-19T16:52:17.255,ns_1@10.242.238.90:<0.32476.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 118 [rebalance:debug,2014-08-19T16:52:17.255,ns_1@10.242.238.90:<0.32476.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7057.2>,#Ref<16550.0.2.183680>}] [ns_server:debug,2014-08-19T16:52:17.256,ns_1@10.242.238.90:<0.32476.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.256,ns_1@10.242.238.90:<0.32478.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 118 [ns_server:info,2014-08-19T16:52:17.258,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 117 state to replica [ns_server:info,2014-08-19T16:52:17.262,ns_1@10.242.238.90:<0.32481.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 117 to state replica [views:debug,2014-08-19T16:52:17.264,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/162. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:17.264,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",162,replica,0} [ns_server:debug,2014-08-19T16:52:17.281,ns_1@10.242.238.90:<0.32481.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_117_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.282,ns_1@10.242.238.90:<0.32481.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"u"}, {checkpoints,[{117,0}]}, {name,<<"replication_building_117_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"u"}, {takeover,false}, {suffix,"building_117_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",117,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.283,ns_1@10.242.238.90:<0.32481.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32482.0> [rebalance:debug,2014-08-19T16:52:17.283,ns_1@10.242.238.90:<0.32481.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.283,ns_1@10.242.238.90:<0.32481.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7073.2>,#Ref<16550.0.2.183763>}]} [rebalance:info,2014-08-19T16:52:17.284,ns_1@10.242.238.90:<0.32481.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 117 [rebalance:debug,2014-08-19T16:52:17.284,ns_1@10.242.238.90:<0.32481.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7073.2>,#Ref<16550.0.2.183763>}] 
[ns_server:debug,2014-08-19T16:52:17.285,ns_1@10.242.238.90:<0.32481.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.286,ns_1@10.242.238.90:<0.32483.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 117 [ns_server:info,2014-08-19T16:52:17.287,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 116 state to replica [ns_server:info,2014-08-19T16:52:17.291,ns_1@10.242.238.90:<0.32486.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 116 to state replica [ns_server:debug,2014-08-19T16:52:17.310,ns_1@10.242.238.90:<0.32486.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_116_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.311,ns_1@10.242.238.90:<0.32486.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"t"}, {checkpoints,[{116,0}]}, {name,<<"replication_building_116_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"t"}, {takeover,false}, {suffix,"building_116_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",116,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.311,ns_1@10.242.238.90:<0.32486.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32487.0> [rebalance:debug,2014-08-19T16:52:17.311,ns_1@10.242.238.90:<0.32486.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.312,ns_1@10.242.238.90:<0.32486.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7089.2>,#Ref<16550.0.2.183836>}]} [rebalance:info,2014-08-19T16:52:17.312,ns_1@10.242.238.90:<0.32486.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 116 [rebalance:debug,2014-08-19T16:52:17.312,ns_1@10.242.238.90:<0.32486.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7089.2>,#Ref<16550.0.2.183836>}] [ns_server:debug,2014-08-19T16:52:17.314,ns_1@10.242.238.90:<0.32486.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.314,ns_1@10.242.238.90:<0.32488.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 116 [ns_server:info,2014-08-19T16:52:17.316,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 115 state to replica [ns_server:info,2014-08-19T16:52:17.320,ns_1@10.242.238.90:<0.32505.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 115 to state replica [ns_server:debug,2014-08-19T16:52:17.339,ns_1@10.242.238.90:<0.32505.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_115_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.340,ns_1@10.242.238.90:<0.32505.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"s"}, {checkpoints,[{115,0}]}, {name,<<"replication_building_115_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"s"}, {takeover,false}, {suffix,"building_115_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",115,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} 
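Across this excerpt the initial streams for vbuckets 140 down through 115 open between 16:52:16.569 and 16:52:17.342, i.e. 26 streams in roughly 0.77 s, or about 30 ms per vbucket for this replica-building phase. A sketch that computes the pace from the timestamps, again assuming only the entry format shown here:

    import re
    from datetime import datetime

    INITIAL_STREAM_RX = re.compile(
        r"\[rebalance:info,([^,]+),[^\]]*\]Initial stream for vbucket (\d+)")

    def stream_pace(log_text):
        """Return (streams_seen, seconds_elapsed, seconds_per_stream) for the
        'Initial stream for vbucket N' entries found in a log excerpt."""
        events = [(datetime.fromisoformat(ts), int(vb))
                  for ts, vb in INITIAL_STREAM_RX.findall(log_text)]
        if len(events) < 2:
            return len(events), 0.0, None
        elapsed = (events[-1][0] - events[0][0]).total_seconds()
        return len(events), elapsed, elapsed / (len(events) - 1)

    # For vbuckets 140..115 above: 26 streams in ~0.77 s, about 0.03 s per vbucket.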
[rebalance:debug,2014-08-19T16:52:17.341,ns_1@10.242.238.90:<0.32505.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32506.0> [rebalance:debug,2014-08-19T16:52:17.341,ns_1@10.242.238.90:<0.32505.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.342,ns_1@10.242.238.90:<0.32505.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7105.2>,#Ref<16550.0.2.183909>}]} [rebalance:info,2014-08-19T16:52:17.342,ns_1@10.242.238.90:<0.32505.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 115 [rebalance:debug,2014-08-19T16:52:17.342,ns_1@10.242.238.90:<0.32505.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7105.2>,#Ref<16550.0.2.183909>}] [ns_server:debug,2014-08-19T16:52:17.343,ns_1@10.242.238.90:<0.32505.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.344,ns_1@10.242.238.90:<0.32507.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 115 [ns_server:info,2014-08-19T16:52:17.347,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 114 state to replica [ns_server:info,2014-08-19T16:52:17.351,ns_1@10.242.238.90:<0.32510.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 114 to state replica [ns_server:debug,2014-08-19T16:52:17.371,ns_1@10.242.238.90:<0.32510.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_114_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.372,ns_1@10.242.238.90:<0.32510.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"r"}, {checkpoints,[{114,0}]}, {name,<<"replication_building_114_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"r"}, {takeover,false}, {suffix,"building_114_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",114,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.373,ns_1@10.242.238.90:<0.32510.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32517.0> [rebalance:debug,2014-08-19T16:52:17.373,ns_1@10.242.238.90:<0.32510.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.378,ns_1@10.242.238.90:<0.32510.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7122.2>,#Ref<16550.0.2.183997>}]} [rebalance:info,2014-08-19T16:52:17.378,ns_1@10.242.238.90:<0.32510.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 114 [rebalance:debug,2014-08-19T16:52:17.378,ns_1@10.242.238.90:<0.32510.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7122.2>,#Ref<16550.0.2.183997>}] [ns_server:debug,2014-08-19T16:52:17.379,ns_1@10.242.238.90:<0.32510.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.380,ns_1@10.242.238.90:<0.32518.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 114 [ns_server:info,2014-08-19T16:52:17.381,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 113 state to replica [ns_server:info,2014-08-19T16:52:17.385,ns_1@10.242.238.90:<0.32521.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 113 
to state replica [ns_server:debug,2014-08-19T16:52:17.389,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 160. Nacking mccouch update. [views:debug,2014-08-19T16:52:17.389,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/160. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:17.389,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",160,replica,0} [ns_server:debug,2014-08-19T16:52:17.390,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,720,592,409,954,643,515, 941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421, 357,966,719,655,591,527,408,344,953,706,642,578,514,395,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654, 590,526,407,343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525, 406,342,951,704,640,576,512,393,1015,938,755,691,627,563,380,1002,989,742, 678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,950,767, 703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975, 728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638,574,391, 1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599,535, 416,352,169,961,714,650,586,522,403,948,765,701,637,573,390,1012,999,752,688, 624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960,713, 649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738, 674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,1023, 946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532,349,166, 711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348,165, 710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347,164, 709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346,163, 708,580,397,1019,942,759,631,993,682,554,371,733,605,422,967,656,528,345,162, 707,579,396,1018] [ns_server:debug,2014-08-19T16:52:17.404,ns_1@10.242.238.90:<0.32521.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_113_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.406,ns_1@10.242.238.90:<0.32521.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"q"}, {checkpoints,[{113,0}]}, {name,<<"replication_building_113_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"q"}, {takeover,false}, {suffix,"building_113_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",113,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.406,ns_1@10.242.238.90:<0.32521.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32522.0> [rebalance:debug,2014-08-19T16:52:17.407,ns_1@10.242.238.90:<0.32521.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:52:17.407,ns_1@10.242.238.90:<0.32521.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7138.2>,#Ref<16550.0.2.184099>}]} [rebalance:info,2014-08-19T16:52:17.407,ns_1@10.242.238.90:<0.32521.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 113 [rebalance:debug,2014-08-19T16:52:17.408,ns_1@10.242.238.90:<0.32521.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7138.2>,#Ref<16550.0.2.184099>}] [ns_server:debug,2014-08-19T16:52:17.409,ns_1@10.242.238.90:<0.32521.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.409,ns_1@10.242.238.90:<0.32523.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 113 [ns_server:info,2014-08-19T16:52:17.411,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 112 state to replica [ns_server:info,2014-08-19T16:52:17.414,ns_1@10.242.238.90:<0.32526.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 112 to state replica [ns_server:debug,2014-08-19T16:52:17.433,ns_1@10.242.238.90:<0.32526.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_112_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.435,ns_1@10.242.238.90:<0.32526.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"p"}, {checkpoints,[{112,0}]}, {name,<<"replication_building_112_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"p"}, {takeover,false}, {suffix,"building_112_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",112,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.435,ns_1@10.242.238.90:<0.32526.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32527.0> [rebalance:debug,2014-08-19T16:52:17.436,ns_1@10.242.238.90:<0.32526.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.436,ns_1@10.242.238.90:<0.32526.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7154.2>,#Ref<16550.0.2.184183>}]} [rebalance:info,2014-08-19T16:52:17.436,ns_1@10.242.238.90:<0.32526.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 112 [rebalance:debug,2014-08-19T16:52:17.437,ns_1@10.242.238.90:<0.32526.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7154.2>,#Ref<16550.0.2.184183>}] [ns_server:debug,2014-08-19T16:52:17.438,ns_1@10.242.238.90:<0.32526.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.439,ns_1@10.242.238.90:<0.32528.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 112 [ns_server:info,2014-08-19T16:52:17.440,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 111 state to replica [ns_server:info,2014-08-19T16:52:17.445,ns_1@10.242.238.90:<0.32531.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 111 to state replica [views:debug,2014-08-19T16:52:17.460,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/160. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:17.460,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",160,replica,0} [ns_server:debug,2014-08-19T16:52:17.468,ns_1@10.242.238.90:<0.32531.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_111_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.470,ns_1@10.242.238.90:<0.32531.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"o"}, {checkpoints,[{111,0}]}, {name,<<"replication_building_111_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"o"}, {takeover,false}, {suffix,"building_111_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",111,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.470,ns_1@10.242.238.90:<0.32531.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32532.0> [rebalance:debug,2014-08-19T16:52:17.470,ns_1@10.242.238.90:<0.32531.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.471,ns_1@10.242.238.90:<0.32531.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7170.2>,#Ref<16550.0.2.184256>}]} [rebalance:info,2014-08-19T16:52:17.471,ns_1@10.242.238.90:<0.32531.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 111 [rebalance:debug,2014-08-19T16:52:17.471,ns_1@10.242.238.90:<0.32531.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7170.2>,#Ref<16550.0.2.184256>}] [ns_server:debug,2014-08-19T16:52:17.472,ns_1@10.242.238.90:<0.32531.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.473,ns_1@10.242.238.90:<0.32533.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 111 [ns_server:info,2014-08-19T16:52:17.475,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 110 state to replica [ns_server:info,2014-08-19T16:52:17.479,ns_1@10.242.238.90:<0.32536.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 110 to state replica [ns_server:debug,2014-08-19T16:52:17.498,ns_1@10.242.238.90:<0.32536.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_110_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.499,ns_1@10.242.238.90:<0.32536.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"n"}, {checkpoints,[{110,0}]}, {name,<<"replication_building_110_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"n"}, {takeover,false}, {suffix,"building_110_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",110,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.500,ns_1@10.242.238.90:<0.32536.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32537.0> [rebalance:debug,2014-08-19T16:52:17.500,ns_1@10.242.238.90:<0.32536.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.500,ns_1@10.242.238.90:<0.32536.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter 
{had_backfill,undefined,undefined,[{<16550.7186.2>,#Ref<16550.0.2.184329>}]} [rebalance:info,2014-08-19T16:52:17.500,ns_1@10.242.238.90:<0.32536.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 110 [rebalance:debug,2014-08-19T16:52:17.501,ns_1@10.242.238.90:<0.32536.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7186.2>,#Ref<16550.0.2.184329>}] [ns_server:debug,2014-08-19T16:52:17.502,ns_1@10.242.238.90:<0.32536.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.502,ns_1@10.242.238.90:<0.32538.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 110 [ns_server:info,2014-08-19T16:52:17.506,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 109 state to replica [ns_server:info,2014-08-19T16:52:17.510,ns_1@10.242.238.90:<0.32547.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 109 to state replica [ns_server:debug,2014-08-19T16:52:17.530,ns_1@10.242.238.90:<0.32547.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_109_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.531,ns_1@10.242.238.90:<0.32547.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"m"}, {checkpoints,[{109,0}]}, {name,<<"replication_building_109_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"m"}, {takeover,false}, {suffix,"building_109_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",109,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.531,ns_1@10.242.238.90:<0.32547.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32556.0> [rebalance:debug,2014-08-19T16:52:17.532,ns_1@10.242.238.90:<0.32547.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.532,ns_1@10.242.238.90:<0.32547.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7202.2>,#Ref<16550.0.2.184413>}]} [rebalance:info,2014-08-19T16:52:17.532,ns_1@10.242.238.90:<0.32547.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 109 [rebalance:debug,2014-08-19T16:52:17.532,ns_1@10.242.238.90:<0.32547.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7202.2>,#Ref<16550.0.2.184413>}] [ns_server:debug,2014-08-19T16:52:17.534,ns_1@10.242.238.90:<0.32547.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.534,ns_1@10.242.238.90:<0.32557.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 109 [ns_server:info,2014-08-19T16:52:17.536,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 108 state to replica [ns_server:info,2014-08-19T16:52:17.539,ns_1@10.242.238.90:<0.32560.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 108 to state replica [ns_server:debug,2014-08-19T16:52:17.559,ns_1@10.242.238.90:<0.32560.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_108_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.560,ns_1@10.242.238.90:<0.32560.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"l"}, {checkpoints,[{108,0}]}, 
{name,<<"replication_building_108_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"l"}, {takeover,false}, {suffix,"building_108_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",108,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.561,ns_1@10.242.238.90:<0.32560.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32561.0> [rebalance:debug,2014-08-19T16:52:17.561,ns_1@10.242.238.90:<0.32560.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.561,ns_1@10.242.238.90:<0.32560.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7218.2>,#Ref<16550.0.2.184486>}]} [rebalance:info,2014-08-19T16:52:17.561,ns_1@10.242.238.90:<0.32560.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 108 [rebalance:debug,2014-08-19T16:52:17.562,ns_1@10.242.238.90:<0.32560.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7218.2>,#Ref<16550.0.2.184486>}] [ns_server:debug,2014-08-19T16:52:17.563,ns_1@10.242.238.90:<0.32560.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.565,ns_1@10.242.238.90:<0.32562.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 108 [ns_server:info,2014-08-19T16:52:17.566,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 107 state to replica [ns_server:info,2014-08-19T16:52:17.572,ns_1@10.242.238.90:<0.32565.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 107 to state replica [ns_server:debug,2014-08-19T16:52:17.581,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 158. Nacking mccouch update. [views:debug,2014-08-19T16:52:17.581,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/158. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:17.581,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",158,replica,0} [ns_server:debug,2014-08-19T16:52:17.583,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,720,592,409,954,643,515, 941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421, 357,966,719,655,591,527,408,344,953,706,642,578,514,395,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654, 590,526,407,343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525, 406,342,951,704,640,576,512,393,1015,938,755,691,627,563,380,1002,989,742, 678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950, 767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366, 975,728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638,574, 391,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599, 535,416,352,169,961,714,650,586,522,403,948,765,701,637,573,390,1012,999,752, 688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960, 713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985, 738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401, 1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532, 349,166,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531, 348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530, 347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529, 346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605,422,967,656,528, 345,162,707,579,396,1018] [ns_server:debug,2014-08-19T16:52:17.590,ns_1@10.242.238.90:<0.32565.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_107_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.592,ns_1@10.242.238.90:<0.32565.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"k"}, {checkpoints,[{107,0}]}, {name,<<"replication_building_107_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"k"}, {takeover,false}, {suffix,"building_107_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",107,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.593,ns_1@10.242.238.90:<0.32565.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32566.0> [rebalance:debug,2014-08-19T16:52:17.593,ns_1@10.242.238.90:<0.32565.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.593,ns_1@10.242.238.90:<0.32565.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7235.2>,#Ref<16550.0.2.185183>}]} [rebalance:info,2014-08-19T16:52:17.594,ns_1@10.242.238.90:<0.32565.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 107 
[rebalance:debug,2014-08-19T16:52:17.594,ns_1@10.242.238.90:<0.32565.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7235.2>,#Ref<16550.0.2.185183>}] [ns_server:debug,2014-08-19T16:52:17.595,ns_1@10.242.238.90:<0.32565.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.596,ns_1@10.242.238.90:<0.32567.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 107 [ns_server:info,2014-08-19T16:52:17.597,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 106 state to replica [ns_server:info,2014-08-19T16:52:17.601,ns_1@10.242.238.90:<0.32570.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 106 to state replica [ns_server:debug,2014-08-19T16:52:17.620,ns_1@10.242.238.90:<0.32570.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_106_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.621,ns_1@10.242.238.90:<0.32570.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"j"}, {checkpoints,[{106,0}]}, {name,<<"replication_building_106_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"j"}, {takeover,false}, {suffix,"building_106_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",106,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.622,ns_1@10.242.238.90:<0.32570.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32571.0> [rebalance:debug,2014-08-19T16:52:17.622,ns_1@10.242.238.90:<0.32570.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.623,ns_1@10.242.238.90:<0.32570.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7251.2>,#Ref<16550.0.2.185679>}]} [rebalance:info,2014-08-19T16:52:17.623,ns_1@10.242.238.90:<0.32570.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 106 [rebalance:debug,2014-08-19T16:52:17.623,ns_1@10.242.238.90:<0.32570.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7251.2>,#Ref<16550.0.2.185679>}] [ns_server:debug,2014-08-19T16:52:17.625,ns_1@10.242.238.90:<0.32570.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.625,ns_1@10.242.238.90:<0.32572.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 106 [ns_server:info,2014-08-19T16:52:17.627,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 105 state to replica [ns_server:info,2014-08-19T16:52:17.631,ns_1@10.242.238.90:<0.32575.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 105 to state replica [views:debug,2014-08-19T16:52:17.648,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/158. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:17.649,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",158,replica,0} [ns_server:debug,2014-08-19T16:52:17.652,ns_1@10.242.238.90:<0.32575.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_105_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.653,ns_1@10.242.238.90:<0.32575.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"i"}, {checkpoints,[{105,0}]}, {name,<<"replication_building_105_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"i"}, {takeover,false}, {suffix,"building_105_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",105,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.653,ns_1@10.242.238.90:<0.32575.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32576.0> [rebalance:debug,2014-08-19T16:52:17.654,ns_1@10.242.238.90:<0.32575.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.654,ns_1@10.242.238.90:<0.32575.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7267.2>,#Ref<16550.0.2.185756>}]} [rebalance:info,2014-08-19T16:52:17.654,ns_1@10.242.238.90:<0.32575.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 105 [rebalance:debug,2014-08-19T16:52:17.655,ns_1@10.242.238.90:<0.32575.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7267.2>,#Ref<16550.0.2.185756>}] [ns_server:debug,2014-08-19T16:52:17.655,ns_1@10.242.238.90:<0.32575.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.656,ns_1@10.242.238.90:<0.32577.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 105 [ns_server:info,2014-08-19T16:52:17.658,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 104 state to replica [ns_server:info,2014-08-19T16:52:17.662,ns_1@10.242.238.90:<0.32580.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 104 to state replica [ns_server:debug,2014-08-19T16:52:17.681,ns_1@10.242.238.90:<0.32580.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_104_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.682,ns_1@10.242.238.90:<0.32580.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"h"}, {checkpoints,[{104,0}]}, {name,<<"replication_building_104_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"h"}, {takeover,false}, {suffix,"building_104_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",104,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.683,ns_1@10.242.238.90:<0.32580.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32581.0> [rebalance:debug,2014-08-19T16:52:17.683,ns_1@10.242.238.90:<0.32580.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.684,ns_1@10.242.238.90:<0.32580.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter 
{had_backfill,undefined,undefined,[{<16550.7283.2>,#Ref<16550.0.2.185830>}]} [rebalance:info,2014-08-19T16:52:17.684,ns_1@10.242.238.90:<0.32580.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 104 [rebalance:debug,2014-08-19T16:52:17.684,ns_1@10.242.238.90:<0.32580.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7283.2>,#Ref<16550.0.2.185830>}] [ns_server:debug,2014-08-19T16:52:17.685,ns_1@10.242.238.90:<0.32580.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.685,ns_1@10.242.238.90:<0.32582.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 104 [ns_server:info,2014-08-19T16:52:17.687,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 103 state to replica [ns_server:info,2014-08-19T16:52:17.691,ns_1@10.242.238.90:<0.32585.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 103 to state replica [ns_server:debug,2014-08-19T16:52:17.710,ns_1@10.242.238.90:<0.32585.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_103_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.711,ns_1@10.242.238.90:<0.32585.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"g"}, {checkpoints,[{103,0}]}, {name,<<"replication_building_103_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"g"}, {takeover,false}, {suffix,"building_103_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",103,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.712,ns_1@10.242.238.90:<0.32585.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32594.0> [rebalance:debug,2014-08-19T16:52:17.712,ns_1@10.242.238.90:<0.32585.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.712,ns_1@10.242.238.90:<0.32585.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7299.2>,#Ref<16550.0.2.185907>}]} [rebalance:info,2014-08-19T16:52:17.712,ns_1@10.242.238.90:<0.32585.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 103 [rebalance:debug,2014-08-19T16:52:17.713,ns_1@10.242.238.90:<0.32585.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7299.2>,#Ref<16550.0.2.185907>}] [ns_server:debug,2014-08-19T16:52:17.714,ns_1@10.242.238.90:<0.32585.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.715,ns_1@10.242.238.90:<0.32601.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 103 [ns_server:info,2014-08-19T16:52:17.716,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 102 state to replica [ns_server:info,2014-08-19T16:52:17.720,ns_1@10.242.238.90:<0.32604.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 102 to state replica [ns_server:debug,2014-08-19T16:52:17.739,ns_1@10.242.238.90:<0.32604.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_102_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.740,ns_1@10.242.238.90:<0.32604.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"f"}, {checkpoints,[{102,0}]}, 
{name,<<"replication_building_102_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"f"}, {takeover,false}, {suffix,"building_102_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",102,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.741,ns_1@10.242.238.90:<0.32604.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32605.0> [rebalance:debug,2014-08-19T16:52:17.741,ns_1@10.242.238.90:<0.32604.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.742,ns_1@10.242.238.90:<0.32604.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7315.2>,#Ref<16550.0.2.185984>}]} [rebalance:info,2014-08-19T16:52:17.742,ns_1@10.242.238.90:<0.32604.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 102 [rebalance:debug,2014-08-19T16:52:17.742,ns_1@10.242.238.90:<0.32604.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7315.2>,#Ref<16550.0.2.185984>}] [ns_server:debug,2014-08-19T16:52:17.744,ns_1@10.242.238.90:<0.32604.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.744,ns_1@10.242.238.90:<0.32606.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 102 [ns_server:info,2014-08-19T16:52:17.748,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 101 state to replica [ns_server:info,2014-08-19T16:52:17.752,ns_1@10.242.238.90:<0.32610.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 101 to state replica [ns_server:debug,2014-08-19T16:52:17.767,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 156. Nacking mccouch update. [views:debug,2014-08-19T16:52:17.767,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/156. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:17.767,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",156,replica,0} [ns_server:debug,2014-08-19T16:52:17.768,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,720,592,409,954,643,515, 941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421, 357,966,719,655,591,527,408,344,953,706,642,578,514,395,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654, 590,526,407,343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525, 406,342,951,704,640,576,512,393,1015,938,755,691,627,563,380,1002,989,742, 678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950, 767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366, 975,728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638,574, 391,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599, 535,416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012,999, 752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168, 960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559,376, 985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520, 401,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660, 532,349,166,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659, 531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658, 530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657, 529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605,422,967,656, 528,345,162,707,579,396,1018] [ns_server:debug,2014-08-19T16:52:17.776,ns_1@10.242.238.90:<0.32610.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_101_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.778,ns_1@10.242.238.90:<0.32610.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"e"}, {checkpoints,[{101,0}]}, {name,<<"replication_building_101_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"e"}, {takeover,false}, {suffix,"building_101_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",101,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.779,ns_1@10.242.238.90:<0.32610.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32611.0> [rebalance:debug,2014-08-19T16:52:17.779,ns_1@10.242.238.90:<0.32610.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.779,ns_1@10.242.238.90:<0.32610.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7331.2>,#Ref<16550.0.2.186069>}]} [rebalance:info,2014-08-19T16:52:17.780,ns_1@10.242.238.90:<0.32610.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 101 
[rebalance:debug,2014-08-19T16:52:17.780,ns_1@10.242.238.90:<0.32610.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7331.2>,#Ref<16550.0.2.186069>}] [ns_server:debug,2014-08-19T16:52:17.781,ns_1@10.242.238.90:<0.32610.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.782,ns_1@10.242.238.90:<0.32612.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 101 [ns_server:info,2014-08-19T16:52:17.783,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 100 state to replica [ns_server:info,2014-08-19T16:52:17.787,ns_1@10.242.238.90:<0.32615.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 100 to state replica [ns_server:debug,2014-08-19T16:52:17.807,ns_1@10.242.238.90:<0.32615.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_100_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.808,ns_1@10.242.238.90:<0.32615.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"d"}, {checkpoints,[{100,0}]}, {name,<<"replication_building_100_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"d"}, {takeover,false}, {suffix,"building_100_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",100,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.809,ns_1@10.242.238.90:<0.32615.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32616.0> [rebalance:debug,2014-08-19T16:52:17.809,ns_1@10.242.238.90:<0.32615.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.809,ns_1@10.242.238.90:<0.32615.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7347.2>,#Ref<16550.0.2.186154>}]} [rebalance:info,2014-08-19T16:52:17.809,ns_1@10.242.238.90:<0.32615.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 100 [rebalance:debug,2014-08-19T16:52:17.810,ns_1@10.242.238.90:<0.32615.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7347.2>,#Ref<16550.0.2.186154>}] [ns_server:debug,2014-08-19T16:52:17.811,ns_1@10.242.238.90:<0.32615.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.812,ns_1@10.242.238.90:<0.32617.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 100 [ns_server:info,2014-08-19T16:52:17.813,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 99 state to replica [views:debug,2014-08-19T16:52:17.817,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/156. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:17.817,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",156,replica,0} [ns_server:info,2014-08-19T16:52:17.818,ns_1@10.242.238.90:<0.32620.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 99 to state replica [ns_server:debug,2014-08-19T16:52:17.837,ns_1@10.242.238.90:<0.32620.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_99_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.838,ns_1@10.242.238.90:<0.32620.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"c"}, {checkpoints,[{99,0}]}, {name,<<"replication_building_99_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"c"}, {takeover,false}, {suffix,"building_99_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",99,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.839,ns_1@10.242.238.90:<0.32620.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32621.0> [rebalance:debug,2014-08-19T16:52:17.839,ns_1@10.242.238.90:<0.32620.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.839,ns_1@10.242.238.90:<0.32620.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7363.2>,#Ref<16550.0.2.186238>}]} [rebalance:info,2014-08-19T16:52:17.839,ns_1@10.242.238.90:<0.32620.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 99 [rebalance:debug,2014-08-19T16:52:17.840,ns_1@10.242.238.90:<0.32620.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7363.2>,#Ref<16550.0.2.186238>}] [ns_server:debug,2014-08-19T16:52:17.840,ns_1@10.242.238.90:<0.32620.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.841,ns_1@10.242.238.90:<0.32622.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 99 [ns_server:info,2014-08-19T16:52:17.843,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 98 state to replica [ns_server:info,2014-08-19T16:52:17.848,ns_1@10.242.238.90:<0.32625.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 98 to state replica [ns_server:debug,2014-08-19T16:52:17.868,ns_1@10.242.238.90:<0.32625.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_98_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.869,ns_1@10.242.238.90:<0.32625.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"b"}, {checkpoints,[{98,0}]}, {name,<<"replication_building_98_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"b"}, {takeover,false}, {suffix,"building_98_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",98,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.870,ns_1@10.242.238.90:<0.32625.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32626.0> [rebalance:debug,2014-08-19T16:52:17.870,ns_1@10.242.238.90:<0.32625.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats 
[rebalance:debug,2014-08-19T16:52:17.870,ns_1@10.242.238.90:<0.32625.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7379.2>,#Ref<16550.0.2.186321>}]} [rebalance:info,2014-08-19T16:52:17.871,ns_1@10.242.238.90:<0.32625.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 98 [rebalance:debug,2014-08-19T16:52:17.871,ns_1@10.242.238.90:<0.32625.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7379.2>,#Ref<16550.0.2.186321>}] [ns_server:debug,2014-08-19T16:52:17.872,ns_1@10.242.238.90:<0.32625.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.873,ns_1@10.242.238.90:<0.32627.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 98 [ns_server:info,2014-08-19T16:52:17.874,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 97 state to replica [ns_server:info,2014-08-19T16:52:17.879,ns_1@10.242.238.90:<0.32630.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 97 to state replica [ns_server:debug,2014-08-19T16:52:17.901,ns_1@10.242.238.90:<0.32630.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_97_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.902,ns_1@10.242.238.90:<0.32630.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"a"}, {checkpoints,[{97,0}]}, {name,<<"replication_building_97_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"a"}, {takeover,false}, {suffix,"building_97_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",97,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.903,ns_1@10.242.238.90:<0.32630.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32645.0> [rebalance:debug,2014-08-19T16:52:17.903,ns_1@10.242.238.90:<0.32630.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.903,ns_1@10.242.238.90:<0.32630.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7406.2>,#Ref<16550.0.2.186502>}]} [rebalance:info,2014-08-19T16:52:17.903,ns_1@10.242.238.90:<0.32630.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 97 [rebalance:debug,2014-08-19T16:52:17.904,ns_1@10.242.238.90:<0.32630.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7406.2>,#Ref<16550.0.2.186502>}] [ns_server:debug,2014-08-19T16:52:17.905,ns_1@10.242.238.90:<0.32630.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.905,ns_1@10.242.238.90:<0.32646.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 97 [ns_server:info,2014-08-19T16:52:17.907,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 96 state to replica [ns_server:info,2014-08-19T16:52:17.911,ns_1@10.242.238.90:<0.32649.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 96 to state replica [ns_server:debug,2014-08-19T16:52:17.930,ns_1@10.242.238.90:<0.32649.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_96_'ns_1@10.242.238.90' 
[rebalance:info,2014-08-19T16:52:17.931,ns_1@10.242.238.90:<0.32649.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"`"}, {checkpoints,[{96,0}]}, {name,<<"replication_building_96_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"`"}, {takeover,false}, {suffix,"building_96_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",96,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.932,ns_1@10.242.238.90:<0.32649.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32650.0> [rebalance:debug,2014-08-19T16:52:17.932,ns_1@10.242.238.90:<0.32649.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.933,ns_1@10.242.238.90:<0.32649.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7532.2>,#Ref<16550.0.2.187575>}]} [rebalance:info,2014-08-19T16:52:17.933,ns_1@10.242.238.90:<0.32649.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 96 [rebalance:debug,2014-08-19T16:52:17.933,ns_1@10.242.238.90:<0.32649.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7532.2>,#Ref<16550.0.2.187575>}] [ns_server:debug,2014-08-19T16:52:17.934,ns_1@10.242.238.90:<0.32649.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.935,ns_1@10.242.238.90:<0.32651.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 96 [ns_server:info,2014-08-19T16:52:17.936,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 95 state to replica [ns_server:info,2014-08-19T16:52:17.940,ns_1@10.242.238.90:<0.32654.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 95 to state replica [ns_server:debug,2014-08-19T16:52:17.959,ns_1@10.242.238.90:<0.32654.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_95_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.960,ns_1@10.242.238.90:<0.32654.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"_"}, {checkpoints,[{95,0}]}, {name,<<"replication_building_95_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"_"}, {takeover,false}, {suffix,"building_95_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",95,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.961,ns_1@10.242.238.90:<0.32654.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32655.0> [rebalance:debug,2014-08-19T16:52:17.961,ns_1@10.242.238.90:<0.32654.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.961,ns_1@10.242.238.90:<0.32654.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7584.2>,#Ref<16550.0.2.187985>}]} [rebalance:info,2014-08-19T16:52:17.962,ns_1@10.242.238.90:<0.32654.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 95 [rebalance:debug,2014-08-19T16:52:17.962,ns_1@10.242.238.90:<0.32654.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7584.2>,#Ref<16550.0.2.187985>}] 
[ns_server:debug,2014-08-19T16:52:17.962,ns_1@10.242.238.90:<0.32654.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.964,ns_1@10.242.238.90:<0.32656.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 95 [ns_server:info,2014-08-19T16:52:17.965,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 94 state to replica [ns_server:info,2014-08-19T16:52:17.973,ns_1@10.242.238.90:<0.32659.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 94 to state replica [ns_server:debug,2014-08-19T16:52:17.976,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 154. Nacking mccouch update. [views:debug,2014-08-19T16:52:17.976,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/154. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:17.976,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",154,replica,0} [ns_server:debug,2014-08-19T16:52:17.977,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,697,569,386,1008,748,620,982,671,543,360, 722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593,410, 955,644,516,695,567,384,1006,746,618,980,669,541,358,720,592,409,954,643,515, 941,758,694,630,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421, 357,966,719,655,591,527,408,344,953,706,642,578,514,395,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654, 590,526,407,343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525, 406,342,951,704,640,576,512,393,1015,938,755,691,627,563,380,1002,989,742, 678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950, 767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366, 975,728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638,574, 391,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599, 535,416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012,999, 752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168, 960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559,376, 985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520, 401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971, 660,532,349,166,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970, 659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969, 658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968, 657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605,422,967, 656,528,345,162,707,579,396,1018] [ns_server:debug,2014-08-19T16:52:17.992,ns_1@10.242.238.90:<0.32659.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_94_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:17.994,ns_1@10.242.238.90:<0.32659.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"^"}, {checkpoints,[{94,0}]}, {name,<<"replication_building_94_'ns_1@10.242.238.90'">>}, {takeover,false}] 
{{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"^"}, {takeover,false}, {suffix,"building_94_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",94,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:17.994,ns_1@10.242.238.90:<0.32659.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32660.0> [rebalance:debug,2014-08-19T16:52:17.995,ns_1@10.242.238.90:<0.32659.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:17.995,ns_1@10.242.238.90:<0.32659.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7600.2>,#Ref<16550.0.2.188069>}]} [rebalance:info,2014-08-19T16:52:17.995,ns_1@10.242.238.90:<0.32659.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 94 [rebalance:debug,2014-08-19T16:52:17.995,ns_1@10.242.238.90:<0.32659.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7600.2>,#Ref<16550.0.2.188069>}] [ns_server:debug,2014-08-19T16:52:17.997,ns_1@10.242.238.90:<0.32659.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:17.998,ns_1@10.242.238.90:<0.32661.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 94 [ns_server:info,2014-08-19T16:52:17.999,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 93 state to replica [ns_server:info,2014-08-19T16:52:18.003,ns_1@10.242.238.90:<0.32664.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 93 to state replica [views:debug,2014-08-19T16:52:18.010,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/154. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.010,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",154,replica,0} [ns_server:debug,2014-08-19T16:52:18.021,ns_1@10.242.238.90:<0.32664.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_93_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:18.022,ns_1@10.242.238.90:<0.32664.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"]"}, {checkpoints,[{93,0}]}, {name,<<"replication_building_93_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"]"}, {takeover,false}, {suffix,"building_93_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",93,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:18.023,ns_1@10.242.238.90:<0.32664.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32665.0> [rebalance:debug,2014-08-19T16:52:18.023,ns_1@10.242.238.90:<0.32664.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:18.024,ns_1@10.242.238.90:<0.32664.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7616.2>,#Ref<16550.0.2.188152>}]} [rebalance:info,2014-08-19T16:52:18.024,ns_1@10.242.238.90:<0.32664.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 93 [rebalance:debug,2014-08-19T16:52:18.024,ns_1@10.242.238.90:<0.32664.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7616.2>,#Ref<16550.0.2.188152>}] [ns_server:debug,2014-08-19T16:52:18.025,ns_1@10.242.238.90:<0.32664.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:18.026,ns_1@10.242.238.90:<0.32666.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 93 [ns_server:info,2014-08-19T16:52:18.027,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 92 state to replica [ns_server:info,2014-08-19T16:52:18.031,ns_1@10.242.238.90:<0.32669.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 92 to state replica [ns_server:debug,2014-08-19T16:52:18.050,ns_1@10.242.238.90:<0.32669.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_92_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:18.052,ns_1@10.242.238.90:<0.32669.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"\\"}, {checkpoints,[{92,0}]}, {name,<<"replication_building_92_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"\\"}, {takeover,false}, {suffix,"building_92_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",92,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:18.052,ns_1@10.242.238.90:<0.32669.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32670.0> [rebalance:debug,2014-08-19T16:52:18.052,ns_1@10.242.238.90:<0.32669.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:18.053,ns_1@10.242.238.90:<0.32669.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7632.2>,#Ref<16550.0.2.188235>}]} 
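Reading note for the "Starting tap stream" entries above and below: the {vbuckets,...} option is an Erlang list of vbucket IDs, and because each replica-building stream here carries a single ID in the printable ASCII range, the logger renders the list as a one-character string — "^" is vbucket 94, "]" is 93, "\\" is 92, and the sequence continues down through "V" for 86. A minimal decoding sketch, assuming only the raw quoted value from the log (the helper name is mine, not part of ns_server):

```python
# Hypothetical helper for reading these entries: decode the Erlang string form
# of the {vbuckets, ...} option back into numeric vbucket IDs. Erlang prints a
# list of small integers as a string whenever every element is a printable
# character, which is why vbucket 94 appears as "^", 92 as "\\" and 86 as "V".
def decode_vbuckets(logged: str) -> list[int]:
    # Undo Erlang's string escaping (the two characters \\ denote one
    # backslash, i.e. vbucket 92), then map each character to its code point.
    unescaped = logged.encode("ascii").decode("unicode_escape")
    return [ord(ch) for ch in unescaped]

assert decode_vbuckets("^") == [94]
assert decode_vbuckets(r"\\") == [92]   # logged as {vbuckets,"\\"}
assert decode_vbuckets("V") == [86]
```

The same reading applies to the replication stream started later for "V": it targets the single vbucket 86, not a literal letter.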
[rebalance:info,2014-08-19T16:52:18.053,ns_1@10.242.238.90:<0.32669.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 92 [rebalance:debug,2014-08-19T16:52:18.054,ns_1@10.242.238.90:<0.32669.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7632.2>,#Ref<16550.0.2.188235>}] [ns_server:debug,2014-08-19T16:52:18.055,ns_1@10.242.238.90:<0.32669.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:18.055,ns_1@10.242.238.90:<0.32671.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 92 [ns_server:info,2014-08-19T16:52:18.057,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 91 state to replica [ns_server:info,2014-08-19T16:52:18.061,ns_1@10.242.238.90:<0.32674.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 91 to state replica [ns_server:debug,2014-08-19T16:52:18.085,ns_1@10.242.238.90:<0.32674.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_91_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:18.087,ns_1@10.242.238.90:<0.32674.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"["}, {checkpoints,[{91,0}]}, {name,<<"replication_building_91_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"["}, {takeover,false}, {suffix,"building_91_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",91,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:18.087,ns_1@10.242.238.90:<0.32674.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32689.0> [rebalance:debug,2014-08-19T16:52:18.087,ns_1@10.242.238.90:<0.32674.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:18.088,ns_1@10.242.238.90:<0.32674.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7648.2>,#Ref<16550.0.2.188308>}]} [rebalance:info,2014-08-19T16:52:18.088,ns_1@10.242.238.90:<0.32674.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 91 [rebalance:debug,2014-08-19T16:52:18.088,ns_1@10.242.238.90:<0.32674.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7648.2>,#Ref<16550.0.2.188308>}] [ns_server:debug,2014-08-19T16:52:18.089,ns_1@10.242.238.90:<0.32674.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:18.090,ns_1@10.242.238.90:<0.32690.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 91 [ns_server:info,2014-08-19T16:52:18.091,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 90 state to replica [ns_server:info,2014-08-19T16:52:18.096,ns_1@10.242.238.90:<0.32693.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 90 to state replica [ns_server:debug,2014-08-19T16:52:18.114,ns_1@10.242.238.90:<0.32693.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_90_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:18.116,ns_1@10.242.238.90:<0.32693.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"Z"}, {checkpoints,[{90,0}]}, {name,<<"replication_building_90_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"Z"}, 
{takeover,false}, {suffix,"building_90_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",90,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:18.116,ns_1@10.242.238.90:<0.32693.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32694.0> [rebalance:debug,2014-08-19T16:52:18.117,ns_1@10.242.238.90:<0.32693.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:18.117,ns_1@10.242.238.90:<0.32693.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7664.2>,#Ref<16550.0.2.188405>}]} [rebalance:info,2014-08-19T16:52:18.117,ns_1@10.242.238.90:<0.32693.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 90 [rebalance:debug,2014-08-19T16:52:18.118,ns_1@10.242.238.90:<0.32693.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7664.2>,#Ref<16550.0.2.188405>}] [ns_server:debug,2014-08-19T16:52:18.118,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 152. Nacking mccouch update. [views:debug,2014-08-19T16:52:18.118,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/152. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.118,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",152,replica,0} [ns_server:debug,2014-08-19T16:52:18.119,ns_1@10.242.238.90:<0.32693.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:18.120,ns_1@10.242.238.90:<0.32695.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 90 [ns_server:debug,2014-08-19T16:52:18.119,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671,543, 360,722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593, 410,955,644,516,695,567,384,1006,746,618,980,669,541,358,720,592,409,954,643, 515,694,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421,357,966, 719,655,591,527,408,344,953,706,642,578,514,395,1017,940,757,693,629,565,382, 1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,526, 407,343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381,1003,990, 743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525,406,342, 951,704,640,576,512,393,1015,938,755,691,627,563,380,1002,989,742,678,614, 550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950,767,703, 639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975,728, 664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638,574,391,1013, 753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599,535,416,352, 169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012,999,752,688,624, 560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960,713,649, 585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985,738,674, 610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660,532,349,166, 
711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659,531,348,165, 710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658,530,347,164, 709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529,346,163, 708,580,397,1019,942,759,631,993,682,554,371,733,605,422,967,656,528,345,162, 707,579,396,1018,941,758,630] [ns_server:info,2014-08-19T16:52:18.122,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 89 state to replica [ns_server:info,2014-08-19T16:52:18.125,ns_1@10.242.238.90:<0.32698.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 89 to state replica [ns_server:debug,2014-08-19T16:52:18.147,ns_1@10.242.238.90:<0.32698.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_89_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:18.149,ns_1@10.242.238.90:<0.32698.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"Y"}, {checkpoints,[{89,0}]}, {name,<<"replication_building_89_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"Y"}, {takeover,false}, {suffix,"building_89_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",89,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:18.149,ns_1@10.242.238.90:<0.32698.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32699.0> [rebalance:debug,2014-08-19T16:52:18.149,ns_1@10.242.238.90:<0.32698.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:18.150,ns_1@10.242.238.90:<0.32698.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7680.2>,#Ref<16550.0.2.188478>}]} [rebalance:info,2014-08-19T16:52:18.150,ns_1@10.242.238.90:<0.32698.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 89 [rebalance:debug,2014-08-19T16:52:18.150,ns_1@10.242.238.90:<0.32698.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7680.2>,#Ref<16550.0.2.188478>}] [ns_server:debug,2014-08-19T16:52:18.152,ns_1@10.242.238.90:<0.32698.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:18.152,ns_1@10.242.238.90:<0.32700.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 89 [views:debug,2014-08-19T16:52:18.154,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/152. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.154,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",152,replica,0} [ns_server:info,2014-08-19T16:52:18.154,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 88 state to replica [ns_server:info,2014-08-19T16:52:18.160,ns_1@10.242.238.90:<0.32703.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 88 to state replica [ns_server:debug,2014-08-19T16:52:18.179,ns_1@10.242.238.90:<0.32703.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_88_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:18.180,ns_1@10.242.238.90:<0.32703.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"X"}, {checkpoints,[{88,0}]}, {name,<<"replication_building_88_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"X"}, {takeover,false}, {suffix,"building_88_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",88,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:18.181,ns_1@10.242.238.90:<0.32703.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32704.0> [rebalance:debug,2014-08-19T16:52:18.181,ns_1@10.242.238.90:<0.32703.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:18.181,ns_1@10.242.238.90:<0.32703.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7696.2>,#Ref<16550.0.2.188561>}]} [rebalance:info,2014-08-19T16:52:18.181,ns_1@10.242.238.90:<0.32703.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 88 [rebalance:debug,2014-08-19T16:52:18.182,ns_1@10.242.238.90:<0.32703.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7696.2>,#Ref<16550.0.2.188561>}] [ns_server:debug,2014-08-19T16:52:18.182,ns_1@10.242.238.90:<0.32703.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:18.183,ns_1@10.242.238.90:<0.32705.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 88 [ns_server:info,2014-08-19T16:52:18.185,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 87 state to replica [ns_server:info,2014-08-19T16:52:18.188,ns_1@10.242.238.90:<0.32708.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 87 to state replica [ns_server:debug,2014-08-19T16:52:18.207,ns_1@10.242.238.90:<0.32708.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_87_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:18.209,ns_1@10.242.238.90:<0.32708.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"W"}, {checkpoints,[{87,0}]}, {name,<<"replication_building_87_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"W"}, {takeover,false}, {suffix,"building_87_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",87,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:18.209,ns_1@10.242.238.90:<0.32708.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32717.0> 
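The per-vbucket pattern repeating through this window is: ns_memcached flips the vbucket to replica, ebucketmigrator_srv sets the downstream vbucket state, kills any stale replication_building tap, opens a non-takeover tap stream from 10.242.238.88, reports the initial stream and the backfill-close message, and janitor_agent then waits for persistence of checkpoint 1. A small scanning sketch for a captured copy of this log (the file name and stage labels are my own, not ns_server conventions) that checks whether every vbucket mentioned reached all three milestones:

```python
# Minimal log-scanning sketch: group the three per-vbucket milestones seen in
# these entries and report any vbucket that is missing one of them.
import re
from collections import defaultdict

STAGES = {
    "state_replica": re.compile(r"Changed vbucket (\d+) state to replica"),
    "initial_stream": re.compile(r"Initial stream for vbucket (\d+)"),
    "wait_persist": re.compile(r"persistence of checkpoint 1 in vbucket (\d+)"),
}

def milestones(log_text: str) -> dict[int, set[str]]:
    seen: dict[int, set[str]] = defaultdict(set)
    for stage, pattern in STAGES.items():
        for match in pattern.finditer(log_text):
            seen[int(match.group(1))].add(stage)
    return seen

if __name__ == "__main__":
    with open("ns_server.debug.log") as f:   # hypothetical capture of this log
        seen = milestones(f.read())
    incomplete = {vb: s for vb, s in seen.items() if len(s) < len(STAGES)}
    print("vbuckets missing a milestone:", sorted(incomplete) or "none")
```

This is only a convenience for reading the transcript; it does not reproduce any ns_server logic, just the three message patterns visible above.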
[rebalance:debug,2014-08-19T16:52:18.209,ns_1@10.242.238.90:<0.32708.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:18.210,ns_1@10.242.238.90:<0.32708.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7712.2>,#Ref<16550.0.2.188644>}]} [rebalance:info,2014-08-19T16:52:18.210,ns_1@10.242.238.90:<0.32708.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 87 [rebalance:debug,2014-08-19T16:52:18.210,ns_1@10.242.238.90:<0.32708.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7712.2>,#Ref<16550.0.2.188644>}] [ns_server:debug,2014-08-19T16:52:18.212,ns_1@10.242.238.90:<0.32708.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:18.212,ns_1@10.242.238.90:<0.32724.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 87 [ns_server:info,2014-08-19T16:52:18.214,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 86 state to replica [ns_server:info,2014-08-19T16:52:18.217,ns_1@10.242.238.90:<0.32727.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 86 to state replica [ns_server:debug,2014-08-19T16:52:18.236,ns_1@10.242.238.90:<0.32727.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_building_86_'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:52:18.238,ns_1@10.242.238.90:<0.32727.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"V"}, {checkpoints,[{86,0}]}, {name,<<"replication_building_86_'ns_1@10.242.238.90'">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{vbuckets,"V"}, {takeover,false}, {suffix,"building_86_'ns_1@10.242.238.90'"}, {note_tap_stats,{replica_building,"default",86,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}}, {username,"default"}, {password,get_from_config}, {set_to_pending_state,false}]} [rebalance:debug,2014-08-19T16:52:18.238,ns_1@10.242.238.90:<0.32727.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32728.0> [rebalance:debug,2014-08-19T16:52:18.238,ns_1@10.242.238.90:<0.32727.0>:ebucketmigrator_srv:do_note_tap_stats:693]Handling note_tap_stats [rebalance:debug,2014-08-19T16:52:18.239,ns_1@10.242.238.90:<0.32727.0>:ebucketmigrator_srv:handle_call:335]Suspended had_backfill waiter {had_backfill,undefined,undefined,[{<16550.7728.2>,#Ref<16550.0.2.188727>}]} [rebalance:info,2014-08-19T16:52:18.239,ns_1@10.242.238.90:<0.32727.0>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 86 [rebalance:debug,2014-08-19T16:52:18.239,ns_1@10.242.238.90:<0.32727.0>:ebucketmigrator_srv:process_downstream:981]Replied had_backfill: true to [{<16550.7728.2>,#Ref<16550.0.2.188727>}] [ns_server:debug,2014-08-19T16:52:18.240,ns_1@10.242.238.90:<0.32727.0>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:52:18.241,ns_1@10.242.238.90:<0.32729.0>:janitor_agent:handle_call:793]Going to wait for persistence of checkpoint 1 in vbucket 86 [ns_server:debug,2014-08-19T16:52:18.245,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 150. Nacking mccouch update. [views:debug,2014-08-19T16:52:18.245,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/150. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.246,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",150,replica,0} [ns_server:debug,2014-08-19T16:52:18.248,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671,543, 360,722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593, 410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592,409,954, 643,515,694,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421,357, 966,719,655,591,527,408,344,953,706,642,578,514,395,1017,940,757,693,629,565, 382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590, 526,407,343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525,406, 342,951,704,640,576,512,393,1015,938,755,691,627,563,380,1002,989,742,678, 614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950,767, 703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975, 728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638,574,391, 1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599,535, 416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012,999,752, 688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960, 713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559,376,985, 738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401, 154,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971,660, 532,349,166,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970,659, 531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969,658, 530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657, 529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605,422,967,656, 528,345,162,707,579,396,1018,941,758,630] [views:debug,2014-08-19T16:52:18.296,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/150. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.296,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",150,replica,0} [ns_server:debug,2014-08-19T16:52:18.440,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 148. Nacking mccouch update. [views:debug,2014-08-19T16:52:18.440,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/148. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.440,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",148,replica,0} [ns_server:debug,2014-08-19T16:52:18.441,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671,543, 360,722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593, 410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592,409,954, 643,515,694,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421,357, 966,719,655,591,527,408,344,953,706,642,578,514,395,148,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654, 590,526,407,343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525, 406,342,951,704,640,576,512,393,1015,938,755,691,627,563,380,1002,989,742, 678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950, 767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366, 975,728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638,574, 391,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599, 535,416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012,999, 752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168, 960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559,376, 985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520, 401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971, 660,532,349,166,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970, 659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969, 658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968, 657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605,422,967, 656,528,345,162,707,579,396,1018,941,758,630] [views:debug,2014-08-19T16:52:18.491,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/148. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.491,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",148,replica,0} [ns_server:debug,2014-08-19T16:52:18.583,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 146. Nacking mccouch update. [views:debug,2014-08-19T16:52:18.583,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/146. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.583,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",146,replica,0} [ns_server:debug,2014-08-19T16:52:18.584,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671,543, 360,722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593, 410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592,409,954, 643,515,694,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421,357, 966,719,655,591,527,408,344,953,706,642,578,514,395,148,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654, 590,526,407,343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525, 406,342,951,704,640,576,512,393,146,1015,938,755,691,627,563,380,1002,989, 742,678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158, 950,767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549, 366,975,728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638, 574,391,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663, 599,535,416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012, 999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351, 168,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584, 520,401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426, 971,660,532,349,166,711,583,400,1022,945,762,634,996,685,557,374,736,608,425, 970,659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607,424, 969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423, 968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605,422, 967,656,528,345,162,707,579,396,1018,941,758,630] [views:debug,2014-08-19T16:52:18.633,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/146. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.633,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",146,replica,0} [ns_server:debug,2014-08-19T16:52:18.708,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 144. Nacking mccouch update. [views:debug,2014-08-19T16:52:18.709,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/144. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.709,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",144,replica,0} [ns_server:debug,2014-08-19T16:52:18.710,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671,543, 360,722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593, 410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592,409,954, 643,515,694,566,383,1005,992,745,681,617,553,370,979,732,668,604,540,421,357, 966,719,655,591,527,408,344,953,706,642,578,514,395,148,1017,940,757,693,629, 565,382,1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654, 590,526,407,343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525, 406,342,951,704,640,576,512,393,146,1015,938,755,691,627,563,380,1002,989, 742,678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158, 950,767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549, 366,975,728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638, 574,391,144,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727, 663,599,535,416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415, 351,168,960,713,649,585,521,402,947,764,700,636,572,389,1011,998,751,687,623, 559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648, 584,520,401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609, 426,971,660,532,349,166,711,583,400,1022,945,762,634,996,685,557,374,736,608, 425,970,659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607, 424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606, 423,968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605, 422,967,656,528,345,162,707,579,396,1018,941,758,630] [views:debug,2014-08-19T16:52:18.742,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/144. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.743,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",144,replica,0} [ns_server:debug,2014-08-19T16:52:18.843,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 142. Nacking mccouch update. [views:debug,2014-08-19T16:52:18.843,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/142. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.843,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",142,replica,0} [ns_server:debug,2014-08-19T16:52:18.844,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671,543, 360,722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593, 410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592,409,954, 643,515,694,566,383,1005,745,617,979,732,668,604,540,421,357,966,719,655,591, 527,408,344,953,706,642,578,514,395,148,1017,940,757,693,629,565,382,1004, 991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,526,407, 343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381,1003,990,743, 679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525,406,342,951, 704,640,576,512,393,146,1015,938,755,691,627,563,380,1002,989,742,678,614, 550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950,767,703, 639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975,728, 664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638,574,391,144, 1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599,535, 416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012,999,752, 688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960, 713,649,585,521,402,947,764,700,636,572,389,142,1011,998,751,687,623,559,376, 985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520, 401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971, 660,532,349,166,711,583,400,1022,945,762,634,996,685,557,374,736,608,425,970, 659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607,424,969, 658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968, 657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605,422,967, 656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370] [views:debug,2014-08-19T16:52:18.902,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/142. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.902,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",142,replica,0} [ns_server:debug,2014-08-19T16:52:18.977,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 140. Nacking mccouch update. [views:debug,2014-08-19T16:52:18.977,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/140. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:18.977,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",140,replica,0} [ns_server:debug,2014-08-19T16:52:18.978,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671,543, 360,722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593, 410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592,409,954, 643,515,694,566,383,1005,745,617,979,732,668,604,540,421,357,966,719,655,591, 527,408,344,953,706,642,578,514,395,148,1017,940,757,693,629,565,382,1004, 991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,526,407, 343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381,1003,990,743, 679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525,406,342,951, 704,640,576,512,393,146,1015,938,755,691,627,563,380,1002,989,742,678,614, 550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950,767,703, 639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975,728, 664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638,574,391,144, 1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599,535, 416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012,999,752, 688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960, 713,649,585,521,402,947,764,700,636,572,389,142,1011,998,751,687,623,559,376, 985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520, 401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426,971, 660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425, 970,659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607,424, 969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423, 968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605,422, 967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370] [views:debug,2014-08-19T16:52:19.038,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/140. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:19.039,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",140,replica,0} [rebalance:debug,2014-08-19T16:52:19.040,ns_1@10.242.238.90:<0.32729.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:19.040,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32729.0> (ok) [rebalance:debug,2014-08-19T16:52:19.041,ns_1@10.242.238.90:<0.32727.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:19.042,ns_1@10.242.238.90:<0.32727.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:19.042,ns_1@10.242.238.90:<0.45.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:19.042,ns_1@10.242.238.90:<0.45.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:19.042,ns_1@10.242.238.90:<0.32727.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:52:19.047,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 86 state to replica [ns_server:info,2014-08-19T16:52:19.047,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:start_child:172]Starting replication from 'ns_1@10.242.238.88' for "V" [error_logger:info,2014-08-19T16:52:19.049,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.46.1>}, {name,{new_child_id,"V",'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"V"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:52:19.050,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 86 to state replica [ns_server:debug,2014-08-19T16:52:19.059,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:19.062,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:19.062,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3229 us [ns_server:debug,2014-08-19T16:52:19.062,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:19.063,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{86, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, 
{num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:19.079,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:52:19.084,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,"V"}, {checkpoints,[{86,1}]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"V"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]} [rebalance:debug,2014-08-19T16:52:19.085,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.48.1> [rebalance:info,2014-08-19T16:52:19.086,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:52:19.172,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 161. Nacking mccouch update. [views:debug,2014-08-19T16:52:19.172,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/161. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:19.172,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",161,replica,0} [ns_server:debug,2014-08-19T16:52:19.173,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671,543, 360,722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593, 410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592,409,954, 643,515,694,566,383,1005,745,617,979,732,668,604,540,421,357,966,719,655,591, 527,408,344,161,953,706,642,578,514,395,148,1017,940,757,693,629,565,382, 1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,526, 407,343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381,1003,990, 743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525,406,342, 951,704,640,576,512,393,146,1015,938,755,691,627,563,380,1002,989,742,678, 614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950,767, 703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975, 728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638,574,391, 144,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599, 535,416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012,999, 752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168, 960,713,649,585,521,402,947,764,700,636,572,389,142,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584, 520,401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426, 971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608, 425,970,659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607, 424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606, 423,968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605, 
422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370] [views:debug,2014-08-19T16:52:19.206,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/161. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:19.206,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",161,replica,0} [ns_server:debug,2014-08-19T16:52:19.314,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 159. Nacking mccouch update. [views:debug,2014-08-19T16:52:19.314,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/159. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:19.314,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",159,replica,0} [ns_server:debug,2014-08-19T16:52:19.315,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671,543, 360,722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593, 410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592,409,954, 643,515,694,566,383,1005,745,617,979,732,668,604,540,421,357,966,719,655,591, 527,408,344,161,953,706,642,578,514,395,148,1017,940,757,693,629,565,382, 1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,526, 407,343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381,1003,990, 743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525,406,342, 159,951,704,640,576,512,393,146,1015,938,755,691,627,563,380,1002,989,742, 678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950, 767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366, 975,728,664,600,536,417,353,170,962,715,651,587,523,404,949,766,702,638,574, 391,144,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663, 599,535,416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012, 999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351, 168,960,713,649,585,521,402,947,764,700,636,572,389,142,1011,998,751,687,623, 559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648, 584,520,401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609, 426,971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736, 608,425,970,659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735, 607,424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734, 606,423,968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733, 605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370] [views:debug,2014-08-19T16:52:19.391,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/159. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:19.391,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",159,replica,0} [ns_server:debug,2014-08-19T16:52:19.557,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 157. 
Nacking mccouch update. [views:debug,2014-08-19T16:52:19.557,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/157. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:19.557,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",157,replica,0} [ns_server:debug,2014-08-19T16:52:19.558,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671,543, 360,722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593, 410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592,409,954, 643,515,694,566,383,1005,745,617,979,732,668,604,540,421,357,966,719,655,591, 527,408,344,161,953,706,642,578,514,395,148,1017,940,757,693,629,565,382, 1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,526, 407,343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381,1003,990, 743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525,406,342, 159,951,704,640,576,512,393,146,1015,938,755,691,627,563,380,1002,989,742, 678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950, 767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366, 975,728,664,600,536,417,353,170,962,715,651,587,523,404,157,949,766,702,638, 574,391,144,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727, 663,599,535,416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415, 351,168,960,713,649,585,521,402,947,764,700,636,572,389,142,1011,998,751,687, 623,559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712, 648,584,520,401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737, 609,426,971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374, 736,608,425,970,659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373, 735,607,424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372, 734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371, 733,605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370] [views:debug,2014-08-19T16:52:19.633,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/157. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:19.633,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",157,replica,0} [ns_server:debug,2014-08-19T16:52:19.799,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 155. Nacking mccouch update. [views:debug,2014-08-19T16:52:19.799,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/155. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:19.799,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",155,replica,0} [ns_server:debug,2014-08-19T16:52:19.800,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,698,570,387,1009,749,621,983, 672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671,543, 360,722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721,593, 410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592,409,954, 643,515,694,566,383,1005,745,617,979,668,540,357,966,719,655,591,527,408,344, 161,953,706,642,578,514,395,148,1017,940,757,693,629,565,382,1004,991,744, 680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,160, 952,705,641,577,513,394,1016,939,756,692,628,564,381,1003,990,743,679,615, 551,368,977,730,666,602,538,419,355,964,717,653,589,525,406,342,159,951,704, 640,576,512,393,146,1015,938,755,691,627,563,380,1002,989,742,678,614,550, 367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950,767,703,639, 575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975,728,664, 600,536,417,353,170,962,715,651,587,523,404,157,949,766,702,638,574,391,144, 1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599,535, 416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012,999,752, 688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960, 713,649,585,521,402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584, 520,401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426, 971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608, 425,970,659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607, 424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606, 423,968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605, 422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732,604, 421] [views:debug,2014-08-19T16:52:19.834,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/155. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:19.834,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",155,replica,0} [ns_server:debug,2014-08-19T16:52:19.986,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 153. Nacking mccouch update. [views:debug,2014-08-19T16:52:19.986,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/153. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:19.986,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",153,replica,0} [ns_server:debug,2014-08-19T16:52:19.987,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671, 543,360,722,594,411,956,645,517,696,568,385,1007,747,619,981,670,542,359,721, 593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592,409, 954,643,515,694,566,383,1005,745,617,979,668,540,357,966,719,655,591,527,408, 344,161,953,706,642,578,514,395,148,1017,940,757,693,629,565,382,1004,991, 744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,526,407,343, 160,952,705,641,577,513,394,1016,939,756,692,628,564,381,1003,990,743,679, 615,551,368,977,730,666,602,538,419,355,964,717,653,589,525,406,342,159,951, 704,640,576,512,393,146,1015,938,755,691,627,563,380,1002,989,742,678,614, 550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950,767,703, 639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975,728, 664,600,536,417,353,170,962,715,651,587,523,404,157,949,766,702,638,574,391, 144,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599, 535,416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012,999, 752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168, 960,713,649,585,521,402,155,947,764,700,636,572,389,142,1011,998,751,687,623, 559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648, 584,520,401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609, 426,971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736, 608,425,970,659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735, 607,424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734, 606,423,968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733, 605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732, 604,421] [views:debug,2014-08-19T16:52:20.037,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/153. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:20.037,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",153,replica,0} [ns_server:debug,2014-08-19T16:52:20.186,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 151. Nacking mccouch update. [views:debug,2014-08-19T16:52:20.186,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/151. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:20.187,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",151,replica,0} [ns_server:debug,2014-08-19T16:52:20.187,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671, 543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,981,670,542,359, 721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592, 409,954,643,515,694,566,383,1005,745,617,979,668,540,357,966,719,655,591,527, 408,344,161,953,706,642,578,514,395,148,1017,940,757,693,629,565,382,1004, 991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,526,407, 343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381,1003,990,743, 679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525,406,342,159, 951,704,640,576,512,393,146,1015,938,755,691,627,563,380,1002,989,742,678, 614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950,767, 703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975, 728,664,600,536,417,353,170,962,715,651,587,523,404,157,949,766,702,638,574, 391,144,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663, 599,535,416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012, 999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351, 168,960,713,649,585,521,402,155,947,764,700,636,572,389,142,1011,998,751,687, 623,559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712, 648,584,520,401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737, 609,426,971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374, 736,608,425,970,659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373, 735,607,424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372, 734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371, 733,605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370, 732,604,421] [views:debug,2014-08-19T16:52:20.254,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/151. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:20.254,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",151,replica,0} [ns_server:debug,2014-08-19T16:52:20.397,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 149. Nacking mccouch update. [views:debug,2014-08-19T16:52:20.397,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/149. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:20.397,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",149,replica,0} [ns_server:debug,2014-08-19T16:52:20.398,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671, 543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,981,670,542,359, 721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592, 409,954,643,515,149,694,566,383,1005,745,617,979,668,540,357,966,719,655,591, 527,408,344,161,953,706,642,578,514,395,148,1017,940,757,693,629,565,382, 1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,526, 407,343,160,952,705,641,577,513,394,1016,939,756,692,628,564,381,1003,990, 743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525,406,342, 159,951,704,640,576,512,393,146,1015,938,755,691,627,563,380,1002,989,742, 678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158,950, 767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549,366, 975,728,664,600,536,417,353,170,962,715,651,587,523,404,157,949,766,702,638, 574,391,144,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727, 663,599,535,416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390, 1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415, 351,168,960,713,649,585,521,402,155,947,764,700,636,572,389,142,1011,998,751, 687,623,559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959, 712,648,584,520,401,154,1023,946,763,699,635,571,388,1010,997,686,558,375, 737,609,426,971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557, 374,736,608,425,970,659,531,348,165,710,582,399,1021,944,761,633,995,684,556, 373,735,607,424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555, 372,734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554, 371,733,605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553, 370,732,604,421] [views:debug,2014-08-19T16:52:20.465,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/149. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:20.465,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",149,replica,0} [ns_server:debug,2014-08-19T16:52:20.598,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 147. Nacking mccouch update. [views:debug,2014-08-19T16:52:20.598,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/147. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:20.598,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",147,replica,0} [ns_server:debug,2014-08-19T16:52:20.599,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671, 543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,981,670,542,359, 721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592, 409,954,643,515,149,694,566,383,1005,745,617,979,668,540,357,966,719,655,591, 527,408,344,161,953,706,642,578,514,395,148,1017,940,757,693,629,565,382, 1004,991,744,680,616,552,369,978,731,667,603,539,420,356,965,718,654,590,526, 407,343,160,952,705,641,577,513,394,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,368,977,730,666,602,538,419,355,964,717,653,589,525,406, 342,159,951,704,640,576,512,393,146,1015,938,755,691,627,563,380,1002,989, 742,678,614,550,367,976,729,665,601,537,418,354,963,716,652,588,524,405,158, 950,767,703,639,575,392,1014,754,690,626,562,379,1001,988,741,677,613,549, 366,975,728,664,600,536,417,353,170,962,715,651,587,523,404,157,949,766,702, 638,574,391,144,1013,753,689,625,561,378,1000,987,740,676,612,548,365,974, 727,663,599,535,416,352,169,961,714,650,586,522,403,156,948,765,701,637,573, 390,1012,999,752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534, 415,351,168,960,713,649,585,521,402,155,947,764,700,636,572,389,142,1011,998, 751,687,623,559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,167, 959,712,648,584,520,401,154,1023,946,763,699,635,571,388,1010,997,686,558, 375,737,609,426,971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685, 557,374,736,608,425,970,659,531,348,165,710,582,399,1021,944,761,633,995,684, 556,373,735,607,424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683, 555,372,734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,993,682, 554,371,733,605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681, 553,370,732,604,421] [views:debug,2014-08-19T16:52:20.665,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/147. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:20.665,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",147,replica,0} [ns_server:debug,2014-08-19T16:52:20.808,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 145. Nacking mccouch update. [views:debug,2014-08-19T16:52:20.808,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/145. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:20.808,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",145,replica,0} [ns_server:debug,2014-08-19T16:52:20.809,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671, 543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,981,670,542,359, 721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592, 409,954,643,515,149,694,566,383,1005,745,617,979,668,540,357,719,591,408,953, 706,642,578,514,395,148,1017,940,757,693,629,565,382,1004,991,744,680,616, 552,369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,160,952,705, 641,577,513,394,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551, 368,977,730,666,602,538,419,355,964,717,653,589,525,406,342,159,951,704,640, 576,512,393,146,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367, 976,729,665,601,537,418,354,963,716,652,588,524,405,158,950,767,703,639,575, 392,145,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975,728,664, 600,536,417,353,170,962,715,651,587,523,404,157,949,766,702,638,574,391,144, 1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599,535, 416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,1012,999,752, 688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960, 713,649,585,521,402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559, 376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584, 520,401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609,426, 971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608, 425,970,659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735,607, 424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606, 423,968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733,605, 422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732,604, 421,966,655,527,344,161] [views:debug,2014-08-19T16:52:20.842,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/145. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:20.842,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",145,replica,0} [ns_server:debug,2014-08-19T16:52:20.926,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 143. Nacking mccouch update. [views:debug,2014-08-19T16:52:20.926,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/143. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:20.926,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",143,replica,0} [ns_server:debug,2014-08-19T16:52:20.927,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671, 543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,981,670,542,359, 721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592, 409,954,643,515,149,694,566,383,1005,745,617,979,668,540,357,719,591,408,953, 706,642,578,514,395,148,1017,940,757,693,629,565,382,1004,991,744,680,616, 552,369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,160,952,705, 641,577,513,394,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551, 368,977,730,666,602,538,419,355,964,717,653,589,525,406,342,159,951,704,640, 576,512,393,146,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367, 976,729,665,601,537,418,354,963,716,652,588,524,405,158,950,767,703,639,575, 392,145,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975,728,664, 600,536,417,353,170,962,715,651,587,523,404,157,949,766,702,638,574,391,144, 1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599,535, 416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,143,1012,999, 752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168, 960,713,649,585,521,402,155,947,764,700,636,572,389,142,1011,998,751,687,623, 559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648, 584,520,401,154,1023,946,763,699,635,571,388,1010,997,686,558,375,737,609, 426,971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736, 608,425,970,659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373,735, 607,424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734, 606,423,968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371,733, 605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732, 604,421,966,655,527,344,161] [views:debug,2014-08-19T16:52:20.959,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/143. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:20.960,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",143,replica,0} [ns_server:debug,2014-08-19T16:52:21.051,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 141. Nacking mccouch update. [views:debug,2014-08-19T16:52:21.052,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/141. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:21.052,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",141,replica,0} [ns_server:debug,2014-08-19T16:52:21.053,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671, 543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,981,670,542,359, 721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592, 409,954,643,515,149,694,566,383,1005,745,617,979,668,540,357,719,591,408,953, 706,642,578,514,395,148,1017,940,757,693,629,565,382,1004,991,744,680,616, 552,369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,160,952,705, 641,577,513,394,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551, 368,977,730,666,602,538,419,355,964,717,653,589,525,406,342,159,951,704,640, 576,512,393,146,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367, 976,729,665,601,537,418,354,963,716,652,588,524,405,158,950,767,703,639,575, 392,145,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975,728,664, 600,536,417,353,170,962,715,651,587,523,404,157,949,766,702,638,574,391,144, 1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599,535, 416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,143,1012,999, 752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168, 960,713,649,585,521,402,155,947,764,700,636,572,389,142,1011,998,751,687,623, 559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648, 584,520,401,154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737, 609,426,971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374, 736,608,425,970,659,531,348,165,710,582,399,1021,944,761,633,995,684,556,373, 735,607,424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372, 734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554,371, 733,605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370, 732,604,421,966,655,527,344,161] [views:debug,2014-08-19T16:52:21.102,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/141. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:21.103,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",141,replica,0} [ns_server:debug,2014-08-19T16:52:21.186,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 139. Nacking mccouch update. [views:debug,2014-08-19T16:52:21.186,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/139. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:21.186,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",139,replica,0} [ns_server:debug,2014-08-19T16:52:21.187,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671, 543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,981,670,542,359, 721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592, 409,954,643,515,149,694,566,383,1005,745,617,979,668,540,357,719,591,408,953, 706,642,578,514,395,148,1017,940,757,693,629,565,382,1004,991,744,680,616, 552,369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,160,952,705, 641,577,513,394,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551, 368,977,730,666,602,538,419,355,964,717,653,589,525,406,342,159,951,704,640, 576,512,393,146,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367, 976,729,665,601,537,418,354,963,716,652,588,524,405,158,950,767,703,639,575, 392,145,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975,728,664, 600,536,417,353,170,962,715,651,587,523,404,157,949,766,702,638,574,391,144, 1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599,535, 416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,143,1012,999, 752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168, 960,713,649,585,521,402,155,947,764,700,636,572,389,142,1011,998,751,687,623, 559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648, 584,520,401,154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737, 609,426,971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374, 736,608,425,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556, 373,735,607,424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555, 372,734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,993,682,554, 371,733,605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553, 370,732,604,421,966,655,527,344,161] [views:debug,2014-08-19T16:52:21.220,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/139. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:21.220,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",139,replica,0} [ns_server:debug,2014-08-19T16:52:21.296,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 137. Nacking mccouch update. [views:debug,2014-08-19T16:52:21.296,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/137. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:21.296,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",137,replica,0} [ns_server:debug,2014-08-19T16:52:21.299,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671, 543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,981,670,542,359, 721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592, 409,954,643,515,149,694,566,383,1005,745,617,979,668,540,357,719,591,408,953, 706,642,578,514,395,148,1017,940,757,693,629,565,382,1004,991,744,680,616, 552,369,978,731,667,603,539,420,356,965,718,654,590,526,407,343,160,952,705, 641,577,513,394,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551, 368,977,730,666,602,538,419,355,964,717,653,589,525,406,342,159,951,704,640, 576,512,393,146,1015,938,755,691,627,563,380,1002,989,742,678,614,550,367, 976,729,665,601,537,418,354,963,716,652,588,524,405,158,950,767,703,639,575, 392,145,1014,754,690,626,562,379,1001,988,741,677,613,549,366,975,728,664, 600,536,417,353,170,962,715,651,587,523,404,157,949,766,702,638,574,391,144, 1013,753,689,625,561,378,1000,987,740,676,612,548,365,974,727,663,599,535, 416,352,169,961,714,650,586,522,403,156,948,765,701,637,573,390,143,1012,999, 752,688,624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168, 960,713,649,585,521,402,155,947,764,700,636,572,389,142,1011,998,751,687,623, 559,376,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648, 584,520,401,154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737, 609,426,971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374, 736,608,425,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556, 373,735,607,424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555, 372,734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682, 554,371,733,605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681, 553,370,732,604,421,966,655,527,344,161] [views:debug,2014-08-19T16:52:21.349,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/137. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:21.349,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",137,replica,0} [ns_server:debug,2014-08-19T16:52:21.482,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 135. Nacking mccouch update. [views:debug,2014-08-19T16:52:21.482,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/135. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:21.483,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",135,replica,0} [ns_server:debug,2014-08-19T16:52:21.484,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671, 543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,981,670,542,359, 721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592, 409,954,643,515,149,694,566,383,1005,745,617,979,668,540,357,719,591,408,953, 642,514,148,940,757,693,629,565,382,135,1004,991,744,680,616,552,369,978,731, 667,603,539,420,356,965,718,654,590,526,407,343,160,952,705,641,577,513,394, 147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,159,951,704,640,576,512,393,146, 1015,938,755,691,627,563,380,1002,989,742,678,614,550,367,976,729,665,601, 537,418,354,963,716,652,588,524,405,158,950,767,703,639,575,392,145,1014,754, 690,626,562,379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353,170, 962,715,651,587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561, 378,1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,961,714,650, 586,522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986, 739,675,611,547,364,973,726,662,598,534,415,351,168,960,713,649,585,521,402, 155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,985,738,674,610, 546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,154,1023,946, 763,699,635,571,388,141,1010,997,686,558,375,737,609,426,971,660,532,349,166, 711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659,531,348, 165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,969,658,530, 347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529, 346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605,422,967,656, 528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732,604,421,966,655, 527,344,161,706,578,395,1017] [views:debug,2014-08-19T16:52:21.558,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/135. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:21.558,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",135,replica,0} [ns_server:debug,2014-08-19T16:52:21.691,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 133. Nacking mccouch update. [views:debug,2014-08-19T16:52:21.692,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/133. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:21.692,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",133,replica,0} [ns_server:debug,2014-08-19T16:52:21.693,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671, 543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,981,670,542,359, 721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592, 409,954,643,515,149,694,566,383,1005,745,617,979,668,540,357,719,591,408,953, 642,514,148,940,757,693,629,565,382,135,1004,991,744,680,616,552,369,978,731, 667,603,539,420,356,965,718,654,590,526,407,343,160,952,705,641,577,513,394, 147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,159,951,704,640,576,512,393,146, 1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367,976,729,665, 601,537,418,354,963,716,652,588,524,405,158,950,767,703,639,575,392,145,1014, 754,690,626,562,379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353, 170,962,715,651,587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625, 561,378,1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,961,714, 650,586,522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377, 986,739,675,611,547,364,973,726,662,598,534,415,351,168,960,713,649,585,521, 402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,985,738,674, 610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,971,660,532,349, 166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659,531, 348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,969,658, 530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657, 529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605,422,967, 656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732,604,421,966, 655,527,344,161,706,578,395,1017] [views:debug,2014-08-19T16:52:21.759,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/133. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:21.759,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",133,replica,0} [ns_server:debug,2014-08-19T16:52:21.917,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 131. Nacking mccouch update. [views:debug,2014-08-19T16:52:21.917,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/131. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:21.918,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",131,replica,0} [ns_server:debug,2014-08-19T16:52:21.919,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671, 543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,981,670,542,359, 721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592, 409,954,643,515,149,694,566,383,1005,745,617,979,668,540,357,719,591,408,953, 642,514,148,940,757,693,629,565,382,135,1004,991,744,680,616,552,369,978,731, 667,603,539,420,356,965,718,654,590,526,407,343,160,952,705,641,577,513,394, 147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,159,951,704,640,576,512,393,146, 1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367,976,729,665, 601,537,418,354,963,716,652,588,524,405,158,950,767,703,639,575,392,145,1014, 754,690,626,562,379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353, 170,962,715,651,587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625, 561,378,131,1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,961, 714,650,586,522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560, 377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960,713,649,585, 521,402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,985,738, 674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,154, 1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,971,660, 532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970, 659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424, 969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423, 968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605, 422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732,604, 421,966,655,527,344,161,706,578,395,1017] [ns_server:debug,2014-08-19T16:52:21.988,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:21.994,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:21.994,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5674 us [ns_server:debug,2014-08-19T16:52:21.994,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:21.995,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, 
{map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:22.001,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/131. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:22.001,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",131,replica,0} [ns_server:debug,2014-08-19T16:52:22.152,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 129. Nacking mccouch update. [views:debug,2014-08-19T16:52:22.152,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/129. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:22.152,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",129,replica,0} [ns_server:debug,2014-08-19T16:52:22.153,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982,671, 543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,981,670,542,359, 721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720,592, 409,954,643,515,149,694,566,383,1005,745,617,979,668,540,357,719,591,408,953, 642,514,148,940,757,693,629,565,382,135,1004,991,744,680,616,552,369,978,731, 667,603,539,420,356,965,718,654,590,526,407,343,160,952,705,641,577,513,394, 147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,159,951,704,640,576,512,393,146, 1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367,976,729,665, 601,537,418,354,963,716,652,588,524,405,158,950,767,703,639,575,392,145,1014, 754,690,626,562,379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353, 170,962,715,651,587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625, 561,378,131,1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,961, 714,650,586,522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560, 377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960,713,649,585, 521,402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985, 738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401, 154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,971, 660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425, 970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607, 424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606, 423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733, 605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732, 604,421,966,655,527,344,161,706,578,395,1017] [views:debug,2014-08-19T16:52:22.211,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/129. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:22.211,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",129,replica,0} [ns_server:debug,2014-08-19T16:52:22.297,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 127. Nacking mccouch update. [views:debug,2014-08-19T16:52:22.297,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/127. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:22.297,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",127,replica,0} [ns_server:debug,2014-08-19T16:52:22.298,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982, 671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,981,670,542, 359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358,720, 592,409,954,643,515,149,694,566,383,1005,745,617,979,668,540,357,719,591,408, 953,642,514,148,940,757,693,629,565,382,135,1004,991,744,680,616,552,369,978, 731,667,603,539,420,356,965,718,654,590,526,407,343,160,952,705,641,577,513, 394,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730, 666,602,538,419,355,964,717,653,589,525,406,342,159,951,704,640,576,512,393, 146,1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367,976,729, 665,601,537,418,354,963,716,652,588,524,405,158,950,767,703,639,575,392,145, 1014,754,690,626,562,379,1001,988,741,677,613,549,366,975,728,664,600,536, 417,353,170,962,715,651,587,523,404,157,949,766,702,638,574,391,144,1013,753, 689,625,561,378,131,1000,987,740,676,612,548,365,974,727,663,599,535,416,352, 169,961,714,650,586,522,403,156,948,765,701,637,573,390,143,1012,999,752,688, 624,560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960,713, 649,585,521,402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376, 129,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584, 520,401,154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609, 426,971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736, 608,425,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373, 735,607,424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372, 734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554, 371,733,605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553, 370,732,604,421,966,655,527,344,161,706,578,395,1017] [views:debug,2014-08-19T16:52:22.348,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/127. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:22.348,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",127,replica,0} [ns_server:debug,2014-08-19T16:52:22.497,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 125. Nacking mccouch update. [views:debug,2014-08-19T16:52:22.497,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/125. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:22.498,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",125,replica,0} [ns_server:debug,2014-08-19T16:52:22.499,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982, 671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670, 542,359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358, 720,592,409,954,643,515,149,694,566,383,1005,745,617,979,668,540,357,719,591, 408,953,642,514,148,693,565,382,1004,991,744,680,616,552,369,978,731,667,603, 539,420,356,965,718,654,590,526,407,343,160,952,705,641,577,513,394,147,1016, 939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666,602,538,419, 355,964,717,653,589,525,406,342,159,951,704,640,576,512,393,146,1015,938,755, 691,627,563,380,133,1002,989,742,678,614,550,367,976,729,665,601,537,418,354, 963,716,652,588,524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562, 379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353,170,962,715,651, 587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131, 1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,961,714,650,586, 522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739, 675,611,547,364,973,726,662,598,534,415,351,168,960,713,649,585,521,402,155, 947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610, 546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,154,1023,946, 763,699,635,571,388,141,1010,997,686,558,375,737,609,426,971,660,532,349,166, 711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659,531,348, 165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,969,658,530, 347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657,529, 346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605,422,967,656, 528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732,604,421,966,655, 527,344,161,706,578,395,1017,940,757,629,135] [views:debug,2014-08-19T16:52:22.532,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/125. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:22.532,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",125,replica,0} [ns_server:debug,2014-08-19T16:52:22.632,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 123. Nacking mccouch update. [views:debug,2014-08-19T16:52:22.632,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/123. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:22.632,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",123,replica,0} [ns_server:debug,2014-08-19T16:52:22.633,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982, 671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670, 542,359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358, 720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540,357,719, 591,408,953,642,514,148,693,565,382,1004,991,744,680,616,552,369,978,731,667, 603,539,420,356,965,718,654,590,526,407,343,160,952,705,641,577,513,394,147, 1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,977,730,666,602, 538,419,355,964,717,653,589,525,406,342,159,951,704,640,576,512,393,146,1015, 938,755,691,627,563,380,133,1002,989,742,678,614,550,367,976,729,665,601,537, 418,354,963,716,652,588,524,405,158,950,767,703,639,575,392,145,1014,754,690, 626,562,379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353,170,962, 715,651,587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378, 131,1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,961,714,650, 586,522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986, 739,675,611,547,364,973,726,662,598,534,415,351,168,960,713,649,585,521,402, 155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674, 610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,971,660,532,349, 166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659,531, 348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,969,658, 530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968,657, 529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605,422,967, 656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732,604,421,966, 655,527,344,161,706,578,395,1017,940,757,629,135] [views:debug,2014-08-19T16:52:22.666,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/123. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:22.666,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",123,replica,0} [ns_server:debug,2014-08-19T16:52:22.766,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 121. Nacking mccouch update. [views:debug,2014-08-19T16:52:22.766,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/121. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:22.766,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",121,replica,0} [ns_server:debug,2014-08-19T16:52:22.767,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982, 671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670, 542,359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358, 720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540,357,719, 591,408,953,642,514,148,693,565,382,1004,991,744,680,616,552,369,978,731,667, 603,539,420,356,965,718,654,590,526,407,343,160,952,705,641,577,513,394,147, 1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,121,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,159,951,704,640,576,512,393,146, 1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367,976,729,665, 601,537,418,354,963,716,652,588,524,405,158,950,767,703,639,575,392,145,1014, 754,690,626,562,379,1001,988,741,677,613,549,366,975,728,664,600,536,417,353, 170,962,715,651,587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625, 561,378,131,1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,961, 714,650,586,522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560, 377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960,713,649,585, 521,402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985, 738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401, 154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,971, 660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425, 970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607, 424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606, 423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733, 605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732, 604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135] [views:debug,2014-08-19T16:52:22.824,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/121. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:22.825,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",121,replica,0} [ns_server:debug,2014-08-19T16:52:22.952,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 119. Nacking mccouch update. [views:debug,2014-08-19T16:52:22.952,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/119. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:22.952,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",119,replica,0} [ns_server:debug,2014-08-19T16:52:22.953,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982, 671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670, 542,359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358, 720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540,357,719, 591,408,953,642,514,148,693,565,382,1004,991,744,680,616,552,369,978,731,667, 603,539,420,356,965,718,654,590,526,407,343,160,952,705,641,577,513,394,147, 1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,121,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,159,951,704,640,576,512,393,146, 1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367,976,729,665, 601,537,418,354,963,716,652,588,524,405,158,950,767,703,639,575,392,145,1014, 754,690,626,562,379,1001,988,741,677,613,549,366,119,975,728,664,600,536,417, 353,170,962,715,651,587,523,404,157,949,766,702,638,574,391,144,1013,753,689, 625,561,378,131,1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169, 961,714,650,586,522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624, 560,377,986,739,675,611,547,364,973,726,662,598,534,415,351,168,960,713,649, 585,521,402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129, 985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520, 401,154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426, 971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608, 425,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735, 607,424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734, 606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371, 733,605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370, 732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135] [views:debug,2014-08-19T16:52:23.011,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/119. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:23.011,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",119,replica,0} [ns_server:debug,2014-08-19T16:52:23.145,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 117. Nacking mccouch update. [views:debug,2014-08-19T16:52:23.145,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/117. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:23.145,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",117,replica,0} [ns_server:debug,2014-08-19T16:52:23.146,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982, 671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670, 542,359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358, 720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540,357,719, 591,408,953,642,514,148,693,565,382,1004,991,744,680,616,552,369,978,731,667, 603,539,420,356,965,718,654,590,526,407,343,160,952,705,641,577,513,394,147, 1016,939,756,692,628,564,381,1003,990,743,679,615,551,368,121,977,730,666, 602,538,419,355,964,717,653,589,525,406,342,159,951,704,640,576,512,393,146, 1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367,976,729,665, 601,537,418,354,963,716,652,588,524,405,158,950,767,703,639,575,392,145,1014, 754,690,626,562,379,1001,988,741,677,613,549,366,119,975,728,664,600,536,417, 353,170,962,715,651,587,523,404,157,949,766,702,638,574,391,144,1013,753,689, 625,561,378,131,1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169, 961,714,650,586,522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624, 560,377,986,739,675,611,547,364,117,973,726,662,598,534,415,351,168,960,713, 649,585,521,402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376, 129,985,738,674,610,546,363,972,725,661,597,533,414,350,167,959,712,648,584, 520,401,154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609, 426,971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736, 608,425,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373, 735,607,424,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372, 734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554, 371,733,605,422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553, 370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135] [views:debug,2014-08-19T16:52:23.212,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/117. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:23.212,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",117,replica,0} [ns_server:debug,2014-08-19T16:52:23.337,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 115. Nacking mccouch update. [views:debug,2014-08-19T16:52:23.337,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/115. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:23.337,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",115,replica,0} [ns_server:debug,2014-08-19T16:52:23.338,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982, 671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670, 542,359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358, 720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540,357,719, 591,408,953,642,514,148,693,565,382,1004,744,616,978,731,667,603,539,420,356, 965,718,654,590,526,407,343,160,952,705,641,577,513,394,147,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,368,121,977,730,666,602,538,419,355,964, 717,653,589,525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691,627, 563,380,133,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963,716, 652,588,524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,715,651, 587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131, 1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,961,714,650,586, 522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739, 675,611,547,364,117,973,726,662,598,534,415,351,168,960,713,649,585,521,402, 155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674, 610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659, 531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,969, 658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968, 657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605,422, 967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732,604,421, 966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680,552,369] [views:debug,2014-08-19T16:52:23.405,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/115. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:23.405,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",115,replica,0} [ns_server:debug,2014-08-19T16:52:23.529,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 113. Nacking mccouch update. [views:debug,2014-08-19T16:52:23.529,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/113. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:23.529,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",113,replica,0} [ns_server:debug,2014-08-19T16:52:23.531,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982, 671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670, 542,359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358, 720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540,357,719, 591,408,953,642,514,148,693,565,382,1004,744,616,978,731,667,603,539,420,356, 965,718,654,590,526,407,343,160,952,705,641,577,513,394,147,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,368,121,977,730,666,602,538,419,355,964, 717,653,589,525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691,627, 563,380,133,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963,716, 652,588,524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,715,651, 587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131, 1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,961,714,650,586, 522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739, 675,611,547,364,117,973,726,662,598,534,415,351,168,960,713,649,585,521,402, 155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674, 610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659, 531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113, 969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423, 968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605, 422,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732,604, 421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680,552,369] [views:debug,2014-08-19T16:52:23.598,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/113. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:23.598,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",113,replica,0} [ns_server:debug,2014-08-19T16:52:23.723,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 111. Nacking mccouch update. [views:debug,2014-08-19T16:52:23.723,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/111. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:23.723,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",111,replica,0} [ns_server:debug,2014-08-19T16:52:23.724,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982, 671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670, 542,359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358, 720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540,357,719, 591,408,953,642,514,148,693,565,382,1004,744,616,978,731,667,603,539,420,356, 965,718,654,590,526,407,343,160,952,705,641,577,513,394,147,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,368,121,977,730,666,602,538,419,355,964, 717,653,589,525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691,627, 563,380,133,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963,716, 652,588,524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,715,651, 587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131, 1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,961,714,650,586, 522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739, 675,611,547,364,117,973,726,662,598,534,415,351,168,960,713,649,585,521,402, 155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674, 610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659, 531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113, 969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423, 968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605, 422,111,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732, 604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680,552,369] [views:debug,2014-08-19T16:52:23.783,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/111. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:23.784,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",111,replica,0} [ns_server:debug,2014-08-19T16:52:23.959,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 109. Nacking mccouch update. [views:debug,2014-08-19T16:52:23.959,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/109. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:23.959,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",109,replica,0} [ns_server:debug,2014-08-19T16:52:23.960,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982, 671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670, 542,359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358, 720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540,357,719, 591,408,953,642,514,148,693,565,382,1004,744,616,978,731,667,603,539,420,356, 109,965,718,654,590,526,407,343,160,952,705,641,577,513,394,147,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,368,121,977,730,666,602,538,419,355, 964,717,653,589,525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691, 627,563,380,133,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,963, 716,652,588,524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,715,651, 587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131, 1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,961,714,650,586, 522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739, 675,611,547,364,117,973,726,662,598,534,415,351,168,960,713,649,585,521,402, 155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674, 610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659, 531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113, 969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423, 968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605, 422,111,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732, 604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680,552,369] [views:debug,2014-08-19T16:52:24.018,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/109. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:24.018,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",109,replica,0} [ns_server:debug,2014-08-19T16:52:24.176,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 107. Nacking mccouch update. [views:debug,2014-08-19T16:52:24.176,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/107. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:24.177,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",107,replica,0} [ns_server:debug,2014-08-19T16:52:24.178,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982, 671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670, 542,359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358, 720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540,357,719, 591,408,953,642,514,148,693,565,382,1004,744,616,978,731,667,603,539,420,356, 109,965,718,654,590,526,407,343,160,952,705,641,577,513,394,147,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,368,121,977,730,666,602,538,419,355, 964,717,653,589,525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691, 627,563,380,133,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,107, 963,716,652,588,524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562, 379,1001,988,741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,715, 651,587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131, 1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,961,714,650,586, 522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739, 675,611,547,364,117,973,726,662,598,534,415,351,168,960,713,649,585,521,402, 155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674, 610,546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659, 531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113, 969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423, 968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605, 422,111,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732, 604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680,552,369] [views:debug,2014-08-19T16:52:24.244,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/107. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:24.244,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",107,replica,0} [ns_server:debug,2014-08-19T16:52:24.385,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 105. Nacking mccouch update. [views:debug,2014-08-19T16:52:24.385,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/105. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:24.385,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",105,replica,0} [ns_server:debug,2014-08-19T16:52:24.387,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982, 671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670, 542,359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358, 720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540,357,719, 591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356,965,718,654, 590,526,407,343,160,952,705,641,577,513,394,147,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,368,121,977,730,666,602,538,419,355,964,717,653,589, 525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691,627,563,380,133, 1002,989,742,678,614,550,367,976,729,665,601,537,418,354,107,963,716,652,588, 524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379,1001,988, 741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,715,651,587,523, 404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000,987, 740,676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586,522, 403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739,675, 611,547,364,117,973,726,662,598,534,415,351,168,960,713,649,585,521,402,155, 947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610, 546,363,972,725,661,597,533,414,350,167,959,712,648,584,520,401,154,1023,946, 763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532,349, 166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659,531, 348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113,969, 658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423,968, 657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605,422, 111,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732,604, 421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680,552,369,731, 603,420,109] [views:debug,2014-08-19T16:52:24.452,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/105. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:24.453,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",105,replica,0} [ns_server:debug,2014-08-19T16:52:24.595,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 103. Nacking mccouch update. [views:debug,2014-08-19T16:52:24.595,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/103. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:24.595,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",103,replica,0} [ns_server:debug,2014-08-19T16:52:24.596,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,957,646,518,152,697,569,386,1008,748,620,982, 671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670, 542,359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541,358, 720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540,357,719, 591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356,965,718,654, 590,526,407,343,160,952,705,641,577,513,394,147,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,368,121,977,730,666,602,538,419,355,964,717,653,589, 525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691,627,563,380,133, 1002,989,742,678,614,550,367,976,729,665,601,537,418,354,107,963,716,652,588, 524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379,1001,988, 741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,715,651,587,523, 404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000,987, 740,676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586,522, 403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739,675, 611,547,364,117,973,726,662,598,534,415,351,168,960,713,649,585,521,402,155, 947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610, 546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659, 531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113, 969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423, 968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605, 422,111,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732, 604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680,552,369, 731,603,420,109] [views:debug,2014-08-19T16:52:24.645,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/103. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:24.645,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",103,replica,0} [ns_server:debug,2014-08-19T16:52:24.749,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 101. Nacking mccouch update. [views:debug,2014-08-19T16:52:24.749,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/101. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:24.749,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",101,replica,0} [ns_server:debug,2014-08-19T16:52:24.750,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748,620, 982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981, 670,542,359,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669,541, 358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540,357, 719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356,965,718, 654,590,526,407,343,160,952,705,641,577,513,394,147,1016,939,756,692,628,564, 381,1003,990,743,679,615,551,368,121,977,730,666,602,538,419,355,964,717,653, 589,525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691,627,563,380, 133,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,107,963,716,652, 588,524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379,1001, 988,741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,715,651,587, 523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000, 987,740,676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586, 522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739, 675,611,547,364,117,973,726,662,598,534,415,351,168,960,713,649,585,521,402, 155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674, 610,546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154, 1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971, 660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425, 970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607, 424,113,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734, 606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371, 733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553, 370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680, 552,369,731,603,420,109] [views:debug,2014-08-19T16:52:24.783,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/101. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:24.783,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",101,replica,0} [ns_server:debug,2014-08-19T16:52:24.849,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 99. Nacking mccouch update. [views:debug,2014-08-19T16:52:24.849,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/99. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:24.850,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",99,replica,0} [ns_server:debug,2014-08-19T16:52:24.851,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748,620, 982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981, 670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669, 541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540, 357,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356,965, 718,654,590,526,407,343,160,952,705,641,577,513,394,147,1016,939,756,692,628, 564,381,1003,990,743,679,615,551,368,121,977,730,666,602,538,419,355,964,717, 653,589,525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691,627,563, 380,133,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,107,963,716, 652,588,524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,715,651, 587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131, 1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650, 586,522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986, 739,675,611,547,364,117,973,726,662,598,534,415,351,168,960,713,649,585,521, 402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738, 674,610,546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401, 154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115, 971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608, 425,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735, 607,424,113,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372, 734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554, 371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,992,681, 553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991, 680,552,369,731,603,420,109] [views:debug,2014-08-19T16:52:24.883,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/99. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:24.883,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",99,replica,0} [ns_server:debug,2014-08-19T16:52:25.009,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 97. Nacking mccouch update. [views:debug,2014-08-19T16:52:25.009,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/97. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.009,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",97,replica,0} [ns_server:debug,2014-08-19T16:52:25.010,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748,620, 982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981, 670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669, 541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540, 357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356, 965,718,654,590,526,407,343,160,952,705,641,577,513,394,147,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,368,121,977,730,666,602,538,419,355,964, 717,653,589,525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691,627, 563,380,133,1002,989,742,678,614,550,367,976,729,665,601,537,418,354,107,963, 716,652,588,524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379, 1001,988,741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,715,651, 587,523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131, 1000,987,740,676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650, 586,522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986, 739,675,611,547,364,117,973,726,662,598,534,415,351,168,960,713,649,585,521, 402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738, 674,610,546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401, 154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115, 971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608, 425,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735, 607,424,113,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372, 734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554, 371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,992,681, 553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991, 680,552,369,731,603,420,109] [views:debug,2014-08-19T16:52:25.043,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/97. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.043,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",97,replica,0} [ns_server:debug,2014-08-19T16:52:25.143,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 95. Nacking mccouch update. [views:debug,2014-08-19T16:52:25.143,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/95. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.143,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",95,replica,0} [ns_server:debug,2014-08-19T16:52:25.144,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748,620, 982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981, 670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669, 541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540, 357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356, 718,590,407,952,705,641,577,513,394,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,368,121,977,730,666,602,538,419,355,964,95,717,653,589, 525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691,627,563,380,133, 1002,989,742,678,614,550,367,976,729,665,601,537,418,354,107,963,716,652,588, 524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379,1001,988, 741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,715,651,587,523, 404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000,987, 740,676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586,522, 403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739,675, 611,547,364,117,973,726,662,598,534,415,351,168,960,713,649,585,521,402,155, 947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610, 546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659, 531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113, 969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734,606,423, 968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605, 422,111,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370,732, 604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680,552,369, 731,603,420,109,965,654,526,343,160] [views:debug,2014-08-19T16:52:25.227,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/95. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.228,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",95,replica,0} [ns_server:debug,2014-08-19T16:52:25.402,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 93. Nacking mccouch update. [views:debug,2014-08-19T16:52:25.402,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/93. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.402,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",93,replica,0} [ns_server:debug,2014-08-19T16:52:25.403,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748,620, 982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981, 670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669, 541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540, 357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356, 718,590,407,952,705,641,577,513,394,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,368,121,977,730,666,602,538,419,355,964,95,717,653,589, 525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691,627,563,380,133, 1002,989,742,678,614,550,367,976,729,665,601,537,418,354,107,963,716,652,588, 524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379,1001,988, 741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,93,715,651,587, 523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000, 987,740,676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586, 522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739, 675,611,547,364,117,973,726,662,598,534,415,351,168,960,713,649,585,521,402, 155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674, 610,546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154, 1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971, 660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425, 970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607, 424,113,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372,734, 606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371, 733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553, 370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680, 552,369,731,603,420,109,965,654,526,343,160] [views:debug,2014-08-19T16:52:25.446,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/93. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.446,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",93,replica,0} [ns_server:debug,2014-08-19T16:52:25.522,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 91. Nacking mccouch update. [views:debug,2014-08-19T16:52:25.522,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/91. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.522,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",91,replica,0} [ns_server:debug,2014-08-19T16:52:25.523,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748,620, 982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981, 670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669, 541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540, 357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356, 718,590,407,952,705,641,577,513,394,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,368,121,977,730,666,602,538,419,355,964,95,717,653,589, 525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691,627,563,380,133, 1002,989,742,678,614,550,367,976,729,665,601,537,418,354,107,963,716,652,588, 524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379,1001,988, 741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,93,715,651,587, 523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000, 987,740,676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586, 522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739, 675,611,547,364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521, 402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738, 674,610,546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401, 154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115, 971,660,532,349,166,711,583,400,1022,945,762,634,140,996,685,557,374,736,608, 425,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735, 607,424,113,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555,372, 734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554, 371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,992,681, 553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991, 680,552,369,731,603,420,109,965,654,526,343,160] [views:debug,2014-08-19T16:52:25.557,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/91. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.557,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",91,replica,0} [ns_server:debug,2014-08-19T16:52:25.648,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 89. Nacking mccouch update. [views:debug,2014-08-19T16:52:25.648,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/89. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.648,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",89,replica,0} [ns_server:debug,2014-08-19T16:52:25.649,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748,620, 982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981, 670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669, 541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540, 357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356, 718,590,407,952,705,641,577,513,394,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,368,121,977,730,666,602,538,419,355,964,95,717,653,589, 525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691,627,563,380,133, 1002,989,742,678,614,550,367,976,729,665,601,537,418,354,107,963,716,652,588, 524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379,1001,988, 741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,93,715,651,587, 523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000, 987,740,676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586, 522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739, 675,611,547,364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521, 402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738, 674,610,546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401, 154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115, 971,660,532,349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736, 608,425,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373, 735,607,424,113,969,658,530,347,164,709,581,398,1020,943,760,632,994,683,555, 372,734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682, 554,371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,992, 681,553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135, 991,680,552,369,731,603,420,109,965,654,526,343,160] [views:debug,2014-08-19T16:52:25.708,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/89. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.708,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",89,replica,0} [ns_server:debug,2014-08-19T16:52:25.782,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 87. Nacking mccouch update. [views:debug,2014-08-19T16:52:25.782,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/87. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.782,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",87,replica,0} [ns_server:debug,2014-08-19T16:52:25.783,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748,620, 982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981, 670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669, 541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540, 357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356, 718,590,407,952,705,641,577,513,394,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,368,121,977,730,666,602,538,419,355,964,95,717,653,589, 525,406,342,159,951,704,640,576,512,393,146,1015,938,755,691,627,563,380,133, 1002,989,742,678,614,550,367,976,729,665,601,537,418,354,107,963,716,652,588, 524,405,158,950,767,703,639,575,392,145,1014,754,690,626,562,379,1001,988, 741,677,613,549,366,119,975,728,664,600,536,417,353,170,962,93,715,651,587, 523,404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000, 987,740,676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586, 522,403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739, 675,611,547,364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521, 402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738, 674,610,546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401, 154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115, 971,660,532,349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736, 608,425,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373, 735,607,424,113,969,658,530,347,164,87,709,581,398,1020,943,760,632,994,683, 555,372,734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993, 682,554,371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630, 992,681,553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629, 135,991,680,552,369,731,603,420,109,965,654,526,343,160] [views:debug,2014-08-19T16:52:25.817,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/87. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.817,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",87,replica,0} [ns_server:debug,2014-08-19T16:52:25.916,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 138. Nacking mccouch update. [views:debug,2014-08-19T16:52:25.916,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/138. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.917,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",138,replica,0} [ns_server:debug,2014-08-19T16:52:25.918,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748,620, 982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981, 670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669, 541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540, 357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356, 718,590,407,952,641,513,147,939,756,692,628,564,381,1003,990,743,679,615,551, 368,121,977,730,666,602,538,419,355,964,95,717,653,589,525,406,342,159,951, 704,640,576,512,393,146,1015,938,755,691,627,563,380,133,1002,989,742,678, 614,550,367,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158,950, 767,703,639,575,392,145,1014,754,690,626,562,379,1001,988,741,677,613,549, 366,119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404,157,949, 766,702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740,676,612, 548,365,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948, 765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739,675,611,547,364, 117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402,155,947,764, 700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546,363, 972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023,946,763, 699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532,349,166, 89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659,531, 348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113,969, 658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372,734,606, 423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733, 605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,992,681,553,370, 732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680,552, 369,731,603,420,109,965,654,526,343,160,705,577,394,1016] [views:debug,2014-08-19T16:52:25.976,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/138. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:25.976,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",138,replica,0} [ns_server:debug,2014-08-19T16:52:26.136,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 136. Nacking mccouch update. [views:debug,2014-08-19T16:52:26.136,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/136. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:26.136,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",136,replica,0} [ns_server:debug,2014-08-19T16:52:26.137,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748,620, 982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981, 670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669, 541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540, 357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356, 718,590,407,952,641,513,147,939,756,692,628,564,381,1003,990,743,679,615,551, 368,121,977,730,666,602,538,419,355,964,95,717,653,589,525,406,342,159,951, 704,640,576,512,393,146,1015,938,755,691,627,563,380,133,1002,989,742,678, 614,550,367,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158,950, 767,703,639,575,392,145,1014,754,690,626,562,379,1001,988,741,677,613,549, 366,119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404,157,949, 766,702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740,676,612, 548,365,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948, 765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739,675,611,547,364, 117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402,155,947,764, 700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546,363, 972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023,946,763, 699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532,349,166, 89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659,531, 348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113,969, 658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372,734,606, 423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733, 605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992,681,553, 370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680, 552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016] [views:debug,2014-08-19T16:52:26.195,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/136. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:26.196,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",136,replica,0} [ns_server:debug,2014-08-19T16:52:26.336,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 134. Nacking mccouch update. [views:debug,2014-08-19T16:52:26.337,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/134. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:26.337,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",134,replica,0} [ns_server:debug,2014-08-19T16:52:26.338,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748,620, 982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981, 670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669, 541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540, 357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356, 718,590,407,952,641,513,147,939,756,692,628,564,381,134,1003,990,743,679,615, 551,368,121,977,730,666,602,538,419,355,964,95,717,653,589,525,406,342,159, 951,704,640,576,512,393,146,1015,938,755,691,627,563,380,133,1002,989,742, 678,614,550,367,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158, 950,767,703,639,575,392,145,1014,754,690,626,562,379,1001,988,741,677,613, 549,366,119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404,157, 949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740,676, 612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156, 948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739,675,611,547, 364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402,155,947, 764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546, 363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023,946, 763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532,349, 166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659, 531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113, 969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372,734, 606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371, 733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992,681, 553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991, 680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016] [views:debug,2014-08-19T16:52:26.395,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/134. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:26.395,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",134,replica,0} [ns_server:debug,2014-08-19T16:52:26.521,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 132. Nacking mccouch update. [views:debug,2014-08-19T16:52:26.521,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/132. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:26.521,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",132,replica,0} [ns_server:debug,2014-08-19T16:52:26.522,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748,620, 982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981, 670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669, 541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540, 357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356, 718,590,407,952,641,513,147,939,756,692,628,564,381,134,1003,990,743,679,615, 551,368,121,977,730,666,602,538,419,355,964,95,717,653,589,525,406,342,159, 951,704,640,576,512,393,146,1015,938,755,691,627,563,380,133,1002,989,742, 678,614,550,367,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158, 950,767,703,639,575,392,145,1014,754,690,626,562,379,132,1001,988,741,677, 613,549,366,119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404, 157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740, 676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403, 156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,986,739,675,611, 547,364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402,155, 947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610, 546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970, 659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424, 113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372, 734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554, 371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992, 681,553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135, 991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016] [views:debug,2014-08-19T16:52:26.588,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/132. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:26.589,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",132,replica,0} [ns_server:debug,2014-08-19T16:52:26.730,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 130. Nacking mccouch update. [views:debug,2014-08-19T16:52:26.730,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/130. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:26.730,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",130,replica,0} [ns_server:debug,2014-08-19T16:52:26.731,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749,621, 127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748,620, 982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125,981, 670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618,980,669, 541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668,540, 357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539,356, 718,590,407,952,641,513,147,939,756,692,628,564,381,134,1003,990,743,679,615, 551,368,121,977,730,666,602,538,419,355,964,95,717,653,589,525,406,342,159, 951,704,640,576,512,393,146,1015,938,755,691,627,563,380,133,1002,989,742, 678,614,550,367,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158, 950,767,703,639,575,392,145,1014,754,690,626,562,379,132,1001,988,741,677, 613,549,366,119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404, 157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740, 676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403, 156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675, 611,547,364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402, 155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674, 610,546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154, 1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971, 660,532,349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608, 425,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735, 607,424,113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683, 555,372,734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993, 682,554,371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630, 136,992,681,553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757, 629,135,991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016] [views:debug,2014-08-19T16:52:26.780,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/130. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:26.780,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",130,replica,0} [ns_server:debug,2014-08-19T16:52:26.875,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 128. Nacking mccouch update. [views:debug,2014-08-19T16:52:26.875,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/128. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:26.875,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",128,replica,0} [ns_server:debug,2014-08-19T16:52:26.876,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749, 621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748, 620,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619,125, 981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618,980, 669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979,668, 540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667,539, 356,718,590,407,952,641,513,147,692,564,381,1003,990,743,679,615,551,368,121, 977,730,666,602,538,419,355,964,95,717,653,589,525,406,342,159,951,704,640, 576,512,393,146,1015,938,755,691,627,563,380,133,1002,989,742,678,614,550, 367,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158,950,767,703, 639,575,392,145,1014,754,690,626,562,379,132,1001,988,741,677,613,549,366, 119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404,157,949,766, 702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548, 365,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948,765, 701,637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547,364, 117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402,155,947,764, 700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546,363, 972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023,946,763, 699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532,349,166, 89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659,531, 348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113,969, 658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372,734,606, 423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733, 605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992,681,553, 370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680, 552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016,939,756,628,134] [views:debug,2014-08-19T16:52:26.910,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/128. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:26.910,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",128,replica,0} [ns_server:debug,2014-08-19T16:52:27.009,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 126. Nacking mccouch update. [views:debug,2014-08-19T16:52:27.009,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/126. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.010,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",126,replica,0} [ns_server:debug,2014-08-19T16:52:27.011,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749, 621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748, 620,126,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619, 125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618, 980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123,979, 668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978,667, 539,356,718,590,407,952,641,513,147,692,564,381,1003,990,743,679,615,551,368, 121,977,730,666,602,538,419,355,964,95,717,653,589,525,406,342,159,951,704, 640,576,512,393,146,1015,938,755,691,627,563,380,133,1002,989,742,678,614, 550,367,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158,950,767, 703,639,575,392,145,1014,754,690,626,562,379,132,1001,988,741,677,613,549, 366,119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404,157,949, 766,702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740,676,612, 548,365,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948, 765,701,637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547, 364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402,155,947, 764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546, 363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023,946, 763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532,349, 166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659, 531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113, 969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372,734, 606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371, 733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992,681, 553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991, 680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016,939,756,628, 134] [views:debug,2014-08-19T16:52:27.069,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/126. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.069,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",126,replica,0} [ns_server:debug,2014-08-19T16:52:27.144,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 124. Nacking mccouch update. [views:debug,2014-08-19T16:52:27.144,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/124. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.144,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",124,replica,0} [ns_server:debug,2014-08-19T16:52:27.145,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749, 621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748, 620,126,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619, 125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618, 124,980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123, 979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,978, 667,539,356,718,590,407,952,641,513,147,692,564,381,1003,990,743,679,615,551, 368,121,977,730,666,602,538,419,355,964,95,717,653,589,525,406,342,159,951, 704,640,576,512,393,146,1015,938,755,691,627,563,380,133,1002,989,742,678, 614,550,367,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158,950, 767,703,639,575,392,145,1014,754,690,626,562,379,132,1001,988,741,677,613, 549,366,119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404,157, 949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740,676, 612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156, 948,765,701,637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611, 547,364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402,155, 947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610, 546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970, 659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424, 113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372, 734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554, 371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992, 681,553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135, 991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016,939,756, 628,134] [views:debug,2014-08-19T16:52:27.194,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/124. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.194,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",124,replica,0} [ns_server:debug,2014-08-19T16:52:27.200,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.204,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.204,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4300 us [ns_server:debug,2014-08-19T16:52:27.205,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.205,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{173, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:27.252,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.256,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.256,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3845 us [ns_server:debug,2014-08-19T16:52:27.257,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{174, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:27.258,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.299,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.302,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.302,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3380 us [ns_server:debug,2014-08-19T16:52:27.302,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:27.303,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{176, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:27.344,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.350,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.350,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5716 us [ns_server:debug,2014-08-19T16:52:27.351,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.351,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{175, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:27.369,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 122. Nacking mccouch update. [views:debug,2014-08-19T16:52:27.369,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/122. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.370,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",122,replica,0} [ns_server:debug,2014-08-19T16:52:27.371,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749, 621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748, 620,126,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619, 125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618, 124,980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123, 979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,122, 978,667,539,356,718,590,407,952,641,513,147,692,564,381,1003,990,743,679,615, 551,368,121,977,730,666,602,538,419,355,964,95,717,653,589,525,406,342,159, 951,704,640,576,512,393,146,1015,938,755,691,627,563,380,133,1002,989,742, 678,614,550,367,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158, 950,767,703,639,575,392,145,1014,754,690,626,562,379,132,1001,988,741,677, 613,549,366,119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404, 157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740, 676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403, 156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675, 611,547,364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402, 155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674, 610,546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154, 1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971, 660,532,349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608, 425,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735, 607,424,113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683, 555,372,734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993, 682,554,371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630, 136,992,681,553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757, 629,135,991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016, 939,756,628,134] [ns_server:debug,2014-08-19T16:52:27.395,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.396,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.396,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1588 us [ns_server:debug,2014-08-19T16:52:27.397,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.397,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{178, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, 
{num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:27.420,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/122. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.420,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",122,replica,0} [ns_server:debug,2014-08-19T16:52:27.446,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.453,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.454,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7508 us [ns_server:debug,2014-08-19T16:52:27.454,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.455,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{177, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:27.494,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.497,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.497,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3431 us [ns_server:debug,2014-08-19T16:52:27.498,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.498,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{179, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:27.507,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 120. Nacking mccouch update. [views:debug,2014-08-19T16:52:27.507,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/120. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.507,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",120,replica,0} [ns_server:debug,2014-08-19T16:52:27.508,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749, 621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748, 620,126,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619, 125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618, 124,980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123, 979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,122, 978,667,539,356,718,590,407,952,641,513,147,692,564,381,1003,990,743,679,615, 551,368,121,977,730,666,602,538,419,355,964,95,717,653,589,525,406,342,159, 951,704,640,576,512,393,146,1015,938,755,691,627,563,380,133,1002,989,742, 678,614,550,367,120,976,729,665,601,537,418,354,107,963,716,652,588,524,405, 158,950,767,703,639,575,392,145,1014,754,690,626,562,379,132,1001,988,741, 677,613,549,366,119,975,728,664,600,536,417,353,170,962,93,715,651,587,523, 404,157,949,766,702,638,574,391,144,1013,753,689,625,561,378,131,1000,987, 740,676,612,548,365,974,727,663,599,535,416,352,169,105,961,714,650,586,522, 403,156,948,765,701,637,573,390,143,1012,999,752,688,624,560,377,130,986,739, 675,611,547,364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521, 402,155,947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738, 674,610,546,363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401, 154,1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115, 971,660,532,349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736, 608,425,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373, 735,607,424,113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994, 683,555,372,734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137, 993,682,554,371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758, 630,136,992,681,553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940, 757,629,135,991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394, 1016,939,756,628,134] [ns_server:debug,2014-08-19T16:52:27.545,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.548,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.549,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3265 us [ns_server:debug,2014-08-19T16:52:27.549,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.550,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{180, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, 
{num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:27.566,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/120. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.566,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",120,replica,0} [ns_server:debug,2014-08-19T16:52:27.592,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.593,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.593,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1114 us [ns_server:debug,2014-08-19T16:52:27.594,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.594,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{181, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:27.632,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 118. Nacking mccouch update. [views:debug,2014-08-19T16:52:27.633,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/118. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.633,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",118,replica,0} [ns_server:debug,2014-08-19T16:52:27.634,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749, 621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748, 620,126,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619, 125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618, 124,980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123, 979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,122, 978,667,539,356,718,590,407,952,641,513,147,692,564,381,1003,743,615,121,977, 730,666,602,538,419,355,964,95,717,653,589,525,406,342,159,951,704,640,576, 512,393,146,1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367, 120,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158,950,767,703, 639,575,392,145,1014,754,690,626,562,379,132,1001,988,741,677,613,549,366, 119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404,157,949,766, 702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548, 365,118,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948, 765,701,637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547, 364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402,155,947, 764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546, 363,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023,946, 763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532,349, 166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970,659, 531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113, 969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372,734, 606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371, 733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992,681, 553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135,991, 680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016,939,756,628, 134,990,679,551,368] [ns_server:debug,2014-08-19T16:52:27.639,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.642,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.642,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3835 us [ns_server:debug,2014-08-19T16:52:27.643,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.643,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{182, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, 
{num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:27.666,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/118. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.666,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",118,replica,0} [ns_server:debug,2014-08-19T16:52:27.689,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.692,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.692,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3187 us [ns_server:debug,2014-08-19T16:52:27.692,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.693,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{183, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:27.738,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.743,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.744,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5169 us [ns_server:debug,2014-08-19T16:52:27.744,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{184, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:27.745,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.750,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 116. Nacking mccouch update. [views:debug,2014-08-19T16:52:27.750,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/116. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.750,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",116,replica,0} [ns_server:debug,2014-08-19T16:52:27.751,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749, 621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748, 620,126,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619, 125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618, 124,980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123, 979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,122, 978,667,539,356,718,590,407,952,641,513,147,692,564,381,1003,743,615,121,977, 730,666,602,538,419,355,964,95,717,653,589,525,406,342,159,951,704,640,576, 512,393,146,1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367, 120,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158,950,767,703, 639,575,392,145,1014,754,690,626,562,379,132,1001,988,741,677,613,549,366, 119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404,157,949,766, 702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548, 365,118,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948, 765,701,637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547, 364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402,155,947, 764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546, 363,116,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,970, 659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424, 113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372, 734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554, 371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992, 681,553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629,135, 991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016,939,756, 628,134,990,679,551,368] [ns_server:debug,2014-08-19T16:52:27.784,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [views:debug,2014-08-19T16:52:27.785,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/116. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.785,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",116,replica,0} [ns_server:debug,2014-08-19T16:52:27.786,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1727 us [ns_server:debug,2014-08-19T16:52:27.787,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{185, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:27.787,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.787,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.857,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.859,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 114. Nacking mccouch update. [views:debug,2014-08-19T16:52:27.859,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/114. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.860,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",114,replica,0} [ns_server:debug,2014-08-19T16:52:27.860,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749, 621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748, 620,126,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619, 125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618, 124,980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123, 979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,122, 978,667,539,356,718,590,407,952,641,513,147,692,564,381,1003,743,615,121,977, 730,666,602,538,419,355,964,95,717,653,589,525,406,342,159,951,704,640,576, 512,393,146,1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367, 120,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158,950,767,703, 639,575,392,145,1014,754,690,626,562,379,132,1001,988,741,677,613,549,366, 119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404,157,949,766, 702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548, 365,118,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948, 765,701,637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547, 364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402,155,947, 764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546, 363,116,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,114, 970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607, 424,113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555, 372,734,606,423,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682, 554,371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136, 992,681,553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757,629, 135,991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016,939, 756,628,134,990,679,551,368] [ns_server:debug,2014-08-19T16:52:27.861,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3575 us [ns_server:debug,2014-08-19T16:52:27.862,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.862,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{186, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:27.865,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[views:debug,2014-08-19T16:52:27.893,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/114. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.893,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",114,replica,0} [ns_server:debug,2014-08-19T16:52:27.934,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.939,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.940,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5604 us [ns_server:debug,2014-08-19T16:52:27.941,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.941,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{187, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:27.976,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 112. Nacking mccouch update. [views:debug,2014-08-19T16:52:27.976,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/112. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:27.977,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",112,replica,0} [ns_server:debug,2014-08-19T16:52:27.978,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749, 621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748, 620,126,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619, 125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618, 124,980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123, 979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,122, 978,667,539,356,718,590,407,952,641,513,147,692,564,381,1003,743,615,121,977, 730,666,602,538,419,355,964,95,717,653,589,525,406,342,159,951,704,640,576, 512,393,146,1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367, 120,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158,950,767,703, 639,575,392,145,1014,754,690,626,562,379,132,1001,988,741,677,613,549,366, 119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404,157,949,766, 702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548, 365,118,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948, 765,701,637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547, 364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402,155,947, 764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546, 363,116,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,114, 970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607, 424,113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555, 372,734,606,423,112,968,657,529,346,163,708,580,397,1019,942,759,631,137,993, 682,554,371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630, 136,992,681,553,370,732,604,421,966,655,527,344,161,706,578,395,1017,940,757, 629,135,991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016, 939,756,628,134,990,679,551,368] [ns_server:debug,2014-08-19T16:52:27.982,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:27.984,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.984,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1917 us [ns_server:debug,2014-08-19T16:52:27.984,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:27.985,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{188, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, 
{type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:28.011,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/112. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:28.011,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",112,replica,0} [ns_server:debug,2014-08-19T16:52:28.030,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.031,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.032,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1655 us [ns_server:debug,2014-08-19T16:52:28.032,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.032,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{190, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.073,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.082,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.082,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8617 us [ns_server:debug,2014-08-19T16:52:28.082,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.083,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{189, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.125,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.127,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:28.128,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2476 us [ns_server:debug,2014-08-19T16:52:28.128,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.128,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{192, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.154,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 110. Nacking mccouch update. [views:debug,2014-08-19T16:52:28.154,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/110. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:28.154,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",110,replica,0} [ns_server:debug,2014-08-19T16:52:28.156,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749, 621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748, 620,126,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619, 125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618, 124,980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123, 979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,122, 978,667,539,356,718,590,407,952,641,513,147,692,564,381,1003,743,615,121,977, 730,666,602,538,419,355,964,95,717,653,589,525,406,342,159,951,704,640,576, 512,393,146,1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367, 120,976,729,665,601,537,418,354,107,963,716,652,588,524,405,158,950,767,703, 639,575,392,145,1014,754,690,626,562,379,132,1001,988,741,677,613,549,366, 119,975,728,664,600,536,417,353,170,962,93,715,651,587,523,404,157,949,766, 702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548, 365,118,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948, 765,701,637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547, 364,117,973,726,662,598,534,415,351,168,960,91,713,649,585,521,402,155,947, 764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546, 363,116,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,114, 970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607, 424,113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555, 372,734,606,423,112,968,657,529,346,163,708,580,397,1019,942,759,631,137,993, 682,554,371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630, 
136,992,681,553,370,732,604,421,110,966,655,527,344,161,706,578,395,1017,940, 757,629,135,991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394, 1016,939,756,628,134,990,679,551,368] [ns_server:debug,2014-08-19T16:52:28.170,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.172,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.172,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1441 us [ns_server:debug,2014-08-19T16:52:28.173,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{191, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.174,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:52:28.205,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/110. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:28.205,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",110,replica,0} [ns_server:debug,2014-08-19T16:52:28.216,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.218,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.218,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1669 us [ns_server:debug,2014-08-19T16:52:28.218,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.219,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{194, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.260,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.264,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:28.264,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3366 us [ns_server:debug,2014-08-19T16:52:28.264,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.265,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{193, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.307,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.310,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.311,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3157 us [ns_server:debug,2014-08-19T16:52:28.311,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.311,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{195, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.346,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 108. Nacking mccouch update. [views:debug,2014-08-19T16:52:28.347,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/108. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:28.347,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",108,replica,0} [ns_server:debug,2014-08-19T16:52:28.348,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749, 621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748, 620,126,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619, 125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618, 124,980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123, 979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,122, 978,667,539,356,718,590,407,952,641,513,147,692,564,381,1003,743,615,121,977, 666,538,355,964,95,717,653,589,525,406,342,159,951,704,640,576,512,393,146, 1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367,120,976,729, 665,601,537,418,354,107,963,716,652,588,524,405,158,950,767,703,639,575,392, 145,1014,754,690,626,562,379,132,1001,988,741,677,613,549,366,119,975,728, 664,600,536,417,353,170,962,93,715,651,587,523,404,157,949,766,702,638,574, 391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548,365,118,974, 727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948,765,701,637, 573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547,364,117,973, 726,662,598,534,415,351,168,960,91,713,649,585,521,402,155,947,764,700,636, 572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546,363,116,972, 725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023,946,763,699, 635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532,349,166,89, 711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,114,970,659,531, 348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113,969, 658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372,734,606, 423,112,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371, 733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992,681, 553,370,732,604,421,110,966,655,527,344,161,706,578,395,1017,940,757,629,135, 991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016,939,756, 628,134,990,679,551,368,730,602,419,108] [ns_server:debug,2014-08-19T16:52:28.351,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.359,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.359,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7648 us [ns_server:debug,2014-08-19T16:52:28.359,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.360,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{196, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, 
{type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.410,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.412,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.412,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2086 us [ns_server:debug,2014-08-19T16:52:28.413,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.413,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{198, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:28.415,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/108. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:28.416,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",108,replica,0} [ns_server:debug,2014-08-19T16:52:28.458,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.461,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.461,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3270 us [ns_server:debug,2014-08-19T16:52:28.462,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.462,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{197, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.512,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.514,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:28.514,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.514,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2273 us [ns_server:debug,2014-08-19T16:52:28.515,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{199, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.562,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.563,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.564,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1820 us [ns_server:debug,2014-08-19T16:52:28.565,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.565,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{200, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.574,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 106. Nacking mccouch update. [views:debug,2014-08-19T16:52:28.574,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/106. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:28.574,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",106,replica,0} [ns_server:debug,2014-08-19T16:52:28.575,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749, 621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748, 620,126,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619, 125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618, 124,980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123, 979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,122, 978,667,539,356,718,590,407,952,641,513,147,692,564,381,1003,743,615,121,977, 666,538,355,964,95,717,653,589,525,406,342,159,951,704,640,576,512,393,146, 1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367,120,976,729, 665,601,537,418,354,107,963,716,652,588,524,405,158,950,767,703,639,575,392, 145,1014,754,690,626,562,379,132,1001,988,741,677,613,549,366,119,975,728, 664,600,536,417,353,170,106,962,93,715,651,587,523,404,157,949,766,702,638, 574,391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548,365,118, 974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948,765,701, 637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547,364,117, 973,726,662,598,534,415,351,168,960,91,713,649,585,521,402,155,947,764,700, 636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546,363,116, 972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023,946,763, 699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532,349,166, 89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,114,970,659, 531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113, 969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372,734, 606,423,112,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554, 371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992, 681,553,370,732,604,421,110,966,655,527,344,161,706,578,395,1017,940,757,629, 135,991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016,939, 756,628,134,990,679,551,368,730,602,419,108] [ns_server:debug,2014-08-19T16:52:28.611,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.613,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.614,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{202, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.614,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2643 us 
[ns_server:debug,2014-08-19T16:52:28.615,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:52:28.657,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/106. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:28.658,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",106,replica,0} [ns_server:debug,2014-08-19T16:52:28.667,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.673,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.673,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5912 us [ns_server:debug,2014-08-19T16:52:28.674,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.674,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{201, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.713,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.716,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.717,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3441 us [ns_server:debug,2014-08-19T16:52:28.717,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.717,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{203, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.766,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.769,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:28.769,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3166 us [ns_server:debug,2014-08-19T16:52:28.769,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.770,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{204, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.808,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 104. Nacking mccouch update. [views:debug,2014-08-19T16:52:28.808,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/104. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:28.808,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",104,replica,0} [ns_server:debug,2014-08-19T16:52:28.809,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,958,647,519,153,698,570,387,1009,749, 621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008,748, 620,126,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747,619, 125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746,618, 124,980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617,123, 979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616,122, 978,667,539,356,718,590,407,952,641,513,147,692,564,381,1003,743,615,121,977, 666,538,355,964,95,717,653,589,525,406,342,159,951,704,640,576,512,393,146, 1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367,120,976,729, 665,601,537,418,354,107,963,716,652,588,524,405,158,950,767,703,639,575,392, 145,1014,754,690,626,562,379,132,1001,988,741,677,613,549,366,119,975,728, 664,600,536,417,353,170,106,962,93,715,651,587,523,404,157,949,766,702,638, 574,391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548,365,118, 974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948,765,701, 637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547,364,117, 973,726,662,598,534,415,351,168,104,960,91,713,649,585,521,402,155,947,764, 700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546,363, 116,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023,946, 763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532,349, 166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,114,970, 659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424, 113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372, 734,606,423,112,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682, 554,371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136, 
992,681,553,370,732,604,421,110,966,655,527,344,161,706,578,395,1017,940,757, 629,135,991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016, 939,756,628,134,990,679,551,368,730,602,419,108] [ns_server:debug,2014-08-19T16:52:28.813,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.816,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.817,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3480 us [ns_server:debug,2014-08-19T16:52:28.817,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.818,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{206, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:28.842,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/104. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:28.842,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",104,replica,0} [ns_server:debug,2014-08-19T16:52:28.865,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.867,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1885 us [ns_server:debug,2014-08-19T16:52:28.867,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.867,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.868,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{205, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.910,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.913,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:28.913,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3101 us [ns_server:debug,2014-08-19T16:52:28.913,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.914,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{208, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:28.959,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:28.960,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 102. Nacking mccouch update. [views:debug,2014-08-19T16:52:28.960,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/102. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:28.961,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",102,replica,0} [ns_server:debug,2014-08-19T16:52:28.962,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,102,958,647,519,153,698,570,387,1009, 749,621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008, 748,620,126,982,671,543,360,722,594,411,956,645,517,151,696,568,385,1007,747, 619,125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006,746, 618,124,980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745,617, 123,979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744,616, 122,978,667,539,356,718,590,407,952,641,513,147,692,564,381,1003,743,615,121, 977,666,538,355,964,95,717,653,589,525,406,342,159,951,704,640,576,512,393, 146,1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367,120,976, 729,665,601,537,418,354,107,963,716,652,588,524,405,158,950,767,703,639,575, 392,145,1014,754,690,626,562,379,132,1001,988,741,677,613,549,366,119,975, 728,664,600,536,417,353,170,106,962,93,715,651,587,523,404,157,949,766,702, 638,574,391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548,365, 118,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948,765, 701,637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547,364, 117,973,726,662,598,534,415,351,168,104,960,91,713,649,585,521,402,155,947, 764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546, 363,116,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,114, 970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607, 424,113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555, 
372,734,606,423,112,968,657,529,346,163,708,580,397,1019,942,759,631,137,993, 682,554,371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630, 136,992,681,553,370,732,604,421,110,966,655,527,344,161,706,578,395,1017,940, 757,629,135,991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394, 1016,939,756,628,134,990,679,551,368,730,602,419,108] [ns_server:debug,2014-08-19T16:52:28.965,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.965,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5361 us [ns_server:debug,2014-08-19T16:52:28.966,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:28.966,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{207, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:28.994,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/102. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:28.994,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",102,replica,0} [ns_server:debug,2014-08-19T16:52:29.017,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.019,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.019,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1896 us [ns_server:debug,2014-08-19T16:52:29.020,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{210, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.020,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.064,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.067,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:29.067,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1752 us [ns_server:debug,2014-08-19T16:52:29.068,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.069,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{209, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.069,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 100. Nacking mccouch update. [views:debug,2014-08-19T16:52:29.069,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/100. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:29.070,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",100,replica,0} [ns_server:debug,2014-08-19T16:52:29.071,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,102,958,647,519,153,698,570,387,1009, 749,621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008, 748,620,126,982,671,543,360,722,594,411,100,956,645,517,151,696,568,385,1007, 747,619,125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006, 746,618,124,980,669,541,358,720,592,409,954,643,515,149,694,566,383,1005,745, 617,123,979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004,744, 616,122,978,667,539,356,718,590,407,952,641,513,147,692,564,381,1003,743,615, 121,977,666,538,355,964,95,717,653,589,525,406,342,159,951,704,640,576,512, 393,146,1015,938,755,691,627,563,380,133,1002,989,742,678,614,550,367,120, 976,729,665,601,537,418,354,107,963,716,652,588,524,405,158,950,767,703,639, 575,392,145,1014,754,690,626,562,379,132,1001,988,741,677,613,549,366,119, 975,728,664,600,536,417,353,170,106,962,93,715,651,587,523,404,157,949,766, 702,638,574,391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548, 365,118,974,727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948, 765,701,637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547, 364,117,973,726,662,598,534,415,351,168,104,960,91,713,649,585,521,402,155, 947,764,700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610, 546,363,116,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154, 1023,946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971, 660,532,349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608, 425,114,970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373, 735,607,424,113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994, 683,555,372,734,606,423,112,968,657,529,346,163,708,580,397,1019,942,759,631, 137,993,682,554,371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941, 
758,630,136,992,681,553,370,732,604,421,110,966,655,527,344,161,706,578,395, 1017,940,757,629,135,991,680,552,369,731,603,420,109,965,654,526,343,160,705, 577,394,1016,939,756,628,134,990,679,551,368,730,602,419,108] [views:debug,2014-08-19T16:52:29.103,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/100. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:29.104,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",100,replica,0} [ns_server:debug,2014-08-19T16:52:29.127,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.130,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.130,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3732 us [ns_server:debug,2014-08-19T16:52:29.131,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.131,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{212, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.179,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.180,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.181,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1368 us [ns_server:debug,2014-08-19T16:52:29.182,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.182,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{211, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.195,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 98. Nacking mccouch update. [views:debug,2014-08-19T16:52:29.195,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/98. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:29.195,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",98,replica,0} [ns_server:debug,2014-08-19T16:52:29.197,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,102,958,647,519,153,698,570,387,1009, 749,621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008, 748,620,126,982,671,543,360,722,594,411,100,956,645,517,151,696,568,385,1007, 747,619,125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006, 746,618,124,980,669,541,358,98,720,592,409,954,643,515,149,694,566,383,1005, 745,617,123,979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004, 744,616,122,978,667,539,356,718,590,407,952,641,513,147,692,564,381,1003,743, 615,121,977,666,538,355,95,717,589,406,951,704,640,576,512,393,146,1015,938, 755,691,627,563,380,133,1002,989,742,678,614,550,367,120,976,729,665,601,537, 418,354,107,963,716,652,588,524,405,158,950,767,703,639,575,392,145,1014,754, 690,626,562,379,132,1001,988,741,677,613,549,366,119,975,728,664,600,536,417, 353,170,106,962,93,715,651,587,523,404,157,949,766,702,638,574,391,144,1013, 753,689,625,561,378,131,1000,987,740,676,612,548,365,118,974,727,663,599,535, 416,352,169,105,961,714,650,586,522,403,156,948,765,701,637,573,390,143,1012, 999,752,688,624,560,377,130,986,739,675,611,547,364,117,973,726,662,598,534, 415,351,168,104,960,91,713,649,585,521,402,155,947,764,700,636,572,389,142, 1011,998,751,687,623,559,376,129,985,738,674,610,546,363,116,972,725,661,597, 533,414,350,167,103,959,712,648,584,520,401,154,1023,946,763,699,635,571,388, 141,1010,997,686,558,375,737,609,426,115,971,660,532,349,166,89,711,583,400, 1022,945,762,634,140,996,685,557,374,736,608,425,114,970,659,531,348,165,710, 582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113,969,658,530,347, 164,87,709,581,398,1020,943,760,632,138,994,683,555,372,734,606,423,112,968, 657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605,422, 111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992,681,553,370,732, 604,421,110,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680,552, 369,731,603,420,109,965,654,526,343,160,705,577,394,1016,939,756,628,134,990, 679,551,368,730,602,419,108,964,653,525,342,159] [views:debug,2014-08-19T16:52:29.229,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/98. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:29.229,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",98,replica,0} [ns_server:debug,2014-08-19T16:52:29.245,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.245,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.245,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 126 us [ns_server:debug,2014-08-19T16:52:29.245,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.246,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{214, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.293,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.298,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.299,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5391 us [ns_server:debug,2014-08-19T16:52:29.300,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.300,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{213, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.313,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 96. Nacking mccouch update. [views:debug,2014-08-19T16:52:29.313,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/96. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:29.313,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",96,replica,0} [ns_server:debug,2014-08-19T16:52:29.314,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,102,958,647,519,153,698,570,387,1009, 749,621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008, 748,620,126,982,671,543,360,722,594,411,100,956,645,517,151,696,568,385,1007, 747,619,125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006, 746,618,124,980,669,541,358,98,720,592,409,954,643,515,149,694,566,383,1005, 745,617,123,979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004, 744,616,122,978,667,539,356,96,718,590,407,952,641,513,147,692,564,381,1003, 743,615,121,977,666,538,355,95,717,589,406,951,704,640,576,512,393,146,1015, 938,755,691,627,563,380,133,1002,989,742,678,614,550,367,120,976,729,665,601, 537,418,354,107,963,716,652,588,524,405,158,950,767,703,639,575,392,145,1014, 754,690,626,562,379,132,1001,988,741,677,613,549,366,119,975,728,664,600,536, 417,353,170,106,962,93,715,651,587,523,404,157,949,766,702,638,574,391,144, 1013,753,689,625,561,378,131,1000,987,740,676,612,548,365,118,974,727,663, 599,535,416,352,169,105,961,714,650,586,522,403,156,948,765,701,637,573,390, 143,1012,999,752,688,624,560,377,130,986,739,675,611,547,364,117,973,726,662, 598,534,415,351,168,104,960,91,713,649,585,521,402,155,947,764,700,636,572, 389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546,363,116,972,725, 661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023,946,763,699,635, 571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532,349,166,89,711, 583,400,1022,945,762,634,140,996,685,557,374,736,608,425,114,970,659,531,348, 165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113,969,658, 530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372,734,606,423, 112,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733, 605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992,681,553, 370,732,604,421,110,966,655,527,344,161,706,578,395,1017,940,757,629,135,991, 680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016,939,756,628, 134,990,679,551,368,730,602,419,108,964,653,525,342,159] [views:debug,2014-08-19T16:52:29.347,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/96. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:29.347,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",96,replica,0} [ns_server:debug,2014-08-19T16:52:29.351,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.354,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.355,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3459 us [ns_server:debug,2014-08-19T16:52:29.355,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.355,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{215, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.401,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.402,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.403,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1341 us [ns_server:debug,2014-08-19T16:52:29.403,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.404,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{216, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.433,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 94. Nacking mccouch update. [views:debug,2014-08-19T16:52:29.433,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/94. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:29.434,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",94,replica,0} [ns_server:debug,2014-08-19T16:52:29.435,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,102,958,647,519,153,698,570,387,1009, 749,621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008, 748,620,126,982,671,543,360,722,594,411,100,956,645,517,151,696,568,385,1007, 747,619,125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006, 746,618,124,980,669,541,358,98,720,592,409,954,643,515,149,694,566,383,1005, 745,617,123,979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004, 744,616,122,978,667,539,356,96,718,590,407,952,641,513,147,692,564,381,1003, 743,615,121,977,666,538,355,95,717,589,406,951,704,640,576,512,393,146,1015, 938,755,691,627,563,380,133,1002,989,742,678,614,550,367,120,976,729,665,601, 537,418,354,107,963,94,716,652,588,524,405,158,950,767,703,639,575,392,145, 1014,754,690,626,562,379,132,1001,988,741,677,613,549,366,119,975,728,664, 600,536,417,353,170,106,962,93,715,651,587,523,404,157,949,766,702,638,574, 391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548,365,118,974, 727,663,599,535,416,352,169,105,961,714,650,586,522,403,156,948,765,701,637, 573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547,364,117,973, 726,662,598,534,415,351,168,104,960,91,713,649,585,521,402,155,947,764,700, 636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546,363,116, 972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023,946,763, 699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532,349,166, 89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,114,970,659, 531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113, 969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372,734, 606,423,112,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554, 371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992, 681,553,370,732,604,421,110,966,655,527,344,161,706,578,395,1017,940,757,629, 135,991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016,939, 756,628,134,990,679,551,368,730,602,419,108,964,653,525,342,159] [ns_server:debug,2014-08-19T16:52:29.467,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.468,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.468,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1643 us [ns_server:debug,2014-08-19T16:52:29.469,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.469,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{218, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.510,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.513,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.514,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3368 us [ns_server:debug,2014-08-19T16:52:29.514,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.515,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{217, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:29.517,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/94. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:29.517,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",94,replica,0} [ns_server:debug,2014-08-19T16:52:29.559,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.562,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.562,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3131 us [ns_server:debug,2014-08-19T16:52:29.563,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.563,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{220, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.610,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.613,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:29.613,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2314 us [ns_server:debug,2014-08-19T16:52:29.614,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{219, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.614,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.658,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.661,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.662,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3477 us [ns_server:debug,2014-08-19T16:52:29.662,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.662,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{221, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.692,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 92. Nacking mccouch update. [views:debug,2014-08-19T16:52:29.693,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/92. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:29.693,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",92,replica,0} [ns_server:debug,2014-08-19T16:52:29.694,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,102,958,647,519,153,698,570,387,1009, 749,621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008, 748,620,126,982,671,543,360,722,594,411,100,956,645,517,151,696,568,385,1007, 747,619,125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006, 746,618,124,980,669,541,358,98,720,592,409,954,643,515,149,694,566,383,1005, 745,617,123,979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004, 744,616,122,978,667,539,356,96,718,590,407,952,641,513,147,692,564,381,1003, 743,615,121,977,666,538,355,95,717,589,406,951,704,640,576,512,393,146,1015, 938,755,691,627,563,380,133,1002,989,742,678,614,550,367,120,976,729,665,601, 537,418,354,107,963,94,716,652,588,524,405,158,950,767,703,639,575,392,145, 1014,754,690,626,562,379,132,1001,988,741,677,613,549,366,119,975,728,664, 600,536,417,353,170,106,962,93,715,651,587,523,404,157,949,766,702,638,574, 391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548,365,118,974, 727,663,599,535,416,352,169,105,961,92,714,650,586,522,403,156,948,765,701, 637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547,364,117, 973,726,662,598,534,415,351,168,104,960,91,713,649,585,521,402,155,947,764, 700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546,363, 116,972,725,661,597,533,414,350,167,103,959,712,648,584,520,401,154,1023,946, 763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532,349, 166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,114,970, 659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424, 113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372, 734,606,423,112,968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682, 554,371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136, 992,681,553,370,732,604,421,110,966,655,527,344,161,706,578,395,1017,940,757, 629,135,991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016, 939,756,628,134,990,679,551,368,730,602,419,108,964,653,525,342,159] [ns_server:debug,2014-08-19T16:52:29.710,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.711,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 21 us [ns_server:debug,2014-08-19T16:52:29.711,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.711,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.712,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{222, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, 
{flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:29.776,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/92. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:29.777,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",92,replica,0} [ns_server:debug,2014-08-19T16:52:29.780,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.783,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.783,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3401 us [ns_server:debug,2014-08-19T16:52:29.784,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.784,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{223, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.827,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.830,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.831,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3419 us [ns_server:debug,2014-08-19T16:52:29.831,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.831,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{224, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.884,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.885,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
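The paired "Got full synchronization request" / "Fully synchronized config in N us" entries report the wall-clock cost of each full config pull in microseconds. As a minimal sketch of producing that kind of figure (not the actual ns_config_rep code; synchronize_fully/0 is a hypothetical stand-in), timer:tc/1 returns elapsed microseconds directly:

%% Minimal timing sketch; synchronize_fully/0 is a placeholder, not ns_server's API.
-module(sync_timing_sketch).
-export([time_full_sync/0]).

time_full_sync() ->
    {Micros, ok} = timer:tc(fun synchronize_fully/0),
    error_logger:info_msg("Fully synchronized config in ~p us~n", [Micros]).

synchronize_fully() ->
    ok.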
[ns_server:debug,2014-08-19T16:52:29.885,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 90. Nacking mccouch update. [views:debug,2014-08-19T16:52:29.885,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/90. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:29.885,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",90,replica,0} [ns_server:debug,2014-08-19T16:52:29.885,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1621 us [ns_server:debug,2014-08-19T16:52:29.886,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{225, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.887,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,102,958,647,519,153,698,570,387,1009, 749,621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008, 748,620,126,982,671,543,360,722,594,411,100,956,645,517,151,696,568,385,1007, 747,619,125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006, 746,618,124,980,669,541,358,98,720,592,409,954,643,515,149,694,566,383,1005, 745,617,123,979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004, 744,616,122,978,667,539,356,96,718,590,407,952,641,513,147,692,564,381,1003, 743,615,121,977,666,538,355,95,717,589,406,951,704,640,576,512,393,146,1015, 938,755,691,627,563,380,133,1002,989,742,678,614,550,367,120,976,729,665,601, 537,418,354,107,963,94,716,652,588,524,405,158,950,767,703,639,575,392,145, 1014,754,690,626,562,379,132,1001,988,741,677,613,549,366,119,975,728,664, 600,536,417,353,170,106,962,93,715,651,587,523,404,157,949,766,702,638,574, 391,144,1013,753,689,625,561,378,131,1000,987,740,676,612,548,365,118,974, 727,663,599,535,416,352,169,105,961,92,714,650,586,522,403,156,948,765,701, 637,573,390,143,1012,999,752,688,624,560,377,130,986,739,675,611,547,364,117, 973,726,662,598,534,415,351,168,104,960,91,713,649,585,521,402,155,947,764, 700,636,572,389,142,1011,998,751,687,623,559,376,129,985,738,674,610,546,363, 116,972,725,661,597,533,414,350,167,103,959,90,712,648,584,520,401,154,1023, 946,763,699,635,571,388,141,1010,997,686,558,375,737,609,426,115,971,660,532, 349,166,89,711,583,400,1022,945,762,634,140,996,685,557,374,736,608,425,114, 970,659,531,348,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607, 424,113,969,658,530,347,164,87,709,581,398,1020,943,760,632,138,994,683,555, 372,734,606,423,112,968,657,529,346,163,708,580,397,1019,942,759,631,137,993, 682,554,371,733,605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630, 136,992,681,553,370,732,604,421,110,966,655,527,344,161,706,578,395,1017,940, 757,629,135,991,680,552,369,731,603,420,109,965,654,526,343,160,705,577,394, 
1016,939,756,628,134,990,679,551,368,730,602,419,108,964,653,525,342,159] [ns_server:debug,2014-08-19T16:52:29.888,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.933,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.935,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.935,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1946 us [ns_server:debug,2014-08-19T16:52:29.936,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.938,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{226, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:29.969,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/90. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:29.969,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",90,replica,0} [ns_server:debug,2014-08-19T16:52:29.983,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:29.984,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:29.985,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1793 us [ns_server:debug,2014-08-19T16:52:29.985,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{228, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:29.985,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.034,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.038,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing 
replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.039,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3954 us [ns_server:debug,2014-08-19T16:52:30.039,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.039,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{227, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:30.085,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.089,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.089,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4392 us [ns_server:debug,2014-08-19T16:52:30.090,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.091,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{230, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:30.111,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 88. Nacking mccouch update. [views:debug,2014-08-19T16:52:30.111,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/88. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:30.112,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",88,replica,0} [ns_server:debug,2014-08-19T16:52:30.112,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,102,958,647,519,153,698,570,387,1009, 749,621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008, 748,620,126,982,671,543,360,722,594,411,100,956,645,517,151,696,568,385,1007, 747,619,125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006, 746,618,124,980,669,541,358,98,720,592,409,954,643,515,149,694,566,383,1005, 745,617,123,979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004, 744,616,122,978,667,539,356,96,718,590,407,952,641,513,147,692,564,381,1003, 743,615,121,977,666,538,355,95,717,589,406,951,640,512,146,938,755,691,627, 563,380,133,1002,989,742,678,614,550,367,120,976,729,665,601,537,418,354,107, 963,94,716,652,588,524,405,158,950,767,703,639,575,392,145,1014,754,690,626, 562,379,132,1001,988,741,677,613,549,366,119,975,728,664,600,536,417,353,170, 106,962,93,715,651,587,523,404,157,949,766,702,638,574,391,144,1013,753,689, 625,561,378,131,1000,987,740,676,612,548,365,118,974,727,663,599,535,416,352, 169,105,961,92,714,650,586,522,403,156,948,765,701,637,573,390,143,1012,999, 752,688,624,560,377,130,986,739,675,611,547,364,117,973,726,662,598,534,415, 351,168,104,960,91,713,649,585,521,402,155,947,764,700,636,572,389,142,1011, 998,751,687,623,559,376,129,985,738,674,610,546,363,116,972,725,661,597,533, 414,350,167,103,959,90,712,648,584,520,401,154,1023,946,763,699,635,571,388, 141,1010,997,686,558,375,737,609,426,115,971,660,532,349,166,89,711,583,400, 1022,945,762,634,140,996,685,557,374,736,608,425,114,970,659,531,348,165,88, 710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113,969,658,530, 347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372,734,606,423,112, 968,657,529,346,163,708,580,397,1019,942,759,631,137,993,682,554,371,733,605, 422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992,681,553,370, 732,604,421,110,966,655,527,344,161,706,578,395,1017,940,757,629,135,991,680, 552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016,939,756,628,134, 990,679,551,368,730,602,419,108,964,653,525,342,159,704,576,393,1015] [ns_server:debug,2014-08-19T16:52:30.148,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.149,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.150,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1150 us [ns_server:debug,2014-08-19T16:52:30.150,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.151,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{229, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, 
{flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:30.178,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/88. Updated state: replica (0) [ns_server:debug,2014-08-19T16:52:30.178,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",88,replica,0} [ns_server:debug,2014-08-19T16:52:30.198,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.198,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.198,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 134 us [ns_server:debug,2014-08-19T16:52:30.199,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.199,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{232, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:30.247,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.253,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.253,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5775 us [ns_server:debug,2014-08-19T16:52:30.254,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.254,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{231, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:30.296,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.300,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
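The "Usable vbuckets:" dumps above are long, unsorted lists of vbucket ids that capi_set_view_manager reports for this node; successive dumps grow as set_vbucket events land (88 shows up in the dump that follows the default/88 event). They are easier to compare once sorted and counted. A small sketch under that assumption (plain integer lists, as printed above); the module and function names are invented:

%% Summarize and diff "Usable vbuckets" dumps.
-module(usable_vb_sketch).
-export([summarize/1, added/2]).

summarize(VBs) ->
    Sorted = lists:usort(VBs),
    #{count => length(Sorted),
      min   => hd(Sorted),
      max   => lists:last(Sorted)}.

%% vbucket ids present in the newer dump but not in the older one
added(OlderVBs, NewerVBs) ->
    ordsets:subtract(ordsets:from_list(NewerVBs), ordsets:from_list(OlderVBs)).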
[ns_server:debug,2014-08-19T16:52:30.300,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3351 us [ns_server:debug,2014-08-19T16:52:30.300,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.301,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{234, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:30.320,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 86. Nacking mccouch update. [views:debug,2014-08-19T16:52:30.320,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/86. Updated state: replica (1) [ns_server:debug,2014-08-19T16:52:30.320,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",86,replica,1} [ns_server:debug,2014-08-19T16:52:30.322,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,673,545,362,724,596,413,102,958,647,519,153,698,570,387,1009, 749,621,127,983,672,544,361,723,595,412,101,957,646,518,152,697,569,386,1008, 748,620,126,982,671,543,360,722,594,411,100,956,645,517,151,696,568,385,1007, 747,619,125,981,670,542,359,99,721,593,410,955,644,516,150,695,567,384,1006, 746,618,124,980,669,541,358,98,720,592,409,954,643,515,149,694,566,383,1005, 745,617,123,979,668,540,357,97,719,591,408,953,642,514,148,693,565,382,1004, 744,616,122,978,667,539,356,96,718,590,407,952,641,513,147,692,564,381,1003, 743,615,121,977,666,538,355,95,717,589,406,951,640,512,146,938,755,691,627, 563,380,133,1002,989,742,678,614,550,367,120,976,729,665,601,537,418,354,107, 963,94,716,652,588,524,405,158,950,767,703,639,575,392,145,1014,754,690,626, 562,379,132,1001,988,741,677,613,549,366,119,975,728,664,600,536,417,353,170, 106,962,93,715,651,587,523,404,157,949,766,702,638,574,391,144,1013,753,689, 625,561,378,131,1000,987,740,676,612,548,365,118,974,727,663,599,535,416,352, 169,105,961,92,714,650,586,522,403,156,948,765,701,637,573,390,143,1012,999, 752,688,624,560,377,130,986,739,675,611,547,364,117,973,726,662,598,534,415, 351,168,104,960,91,713,649,585,521,402,155,947,764,700,636,572,389,142,1011, 998,751,687,623,559,376,129,985,738,674,610,546,363,116,972,725,661,597,533, 414,350,167,103,959,90,712,648,584,520,401,154,1023,946,763,699,635,571,388, 141,1010,997,686,558,375,737,609,426,115,971,660,532,349,166,89,711,583,400, 1022,945,762,634,140,996,685,557,374,736,608,425,114,970,659,531,348,165,88, 710,582,399,1021,944,761,633,139,995,684,556,373,735,607,424,113,969,658,530, 347,164,87,709,581,398,1020,943,760,632,138,994,683,555,372,734,606,423,112, 968,657,529,346,163,86,708,580,397,1019,942,759,631,137,993,682,554,371,733, 605,422,111,967,656,528,345,162,707,579,396,1018,941,758,630,136,992,681,553, 
370,732,604,421,110,966,655,527,344,161,706,578,395,1017,940,757,629,135,991, 680,552,369,731,603,420,109,965,654,526,343,160,705,577,394,1016,939,756,628, 134,990,679,551,368,730,602,419,108,964,653,525,342,159,704,576,393,1015] [ns_server:debug,2014-08-19T16:52:30.346,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.349,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.349,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2919 us [ns_server:debug,2014-08-19T16:52:30.351,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.352,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{233, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:52:30.387,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/86. Updated state: replica (1) [ns_server:debug,2014-08-19T16:52:30.387,ns_1@10.242.238.90:<0.18779.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",86,replica,1} [rebalance:debug,2014-08-19T16:52:30.388,ns_1@10.242.238.90:<0.32705.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:30.388,ns_1@10.242.238.90:<0.32700.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:30.388,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32705.0> (ok) [ns_server:debug,2014-08-19T16:52:30.389,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32700.0> (ok) [rebalance:debug,2014-08-19T16:52:30.390,ns_1@10.242.238.90:<0.32698.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.391,ns_1@10.242.238.90:<0.32698.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.391,ns_1@10.242.238.90:<0.1083.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:52:30.391,ns_1@10.242.238.90:<0.32703.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.391,ns_1@10.242.238.90:<0.1083.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:30.391,ns_1@10.242.238.90:<0.32703.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:52:30.391,ns_1@10.242.238.90:<0.32698.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
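Each ebucketmigrator shut down above logs the same handshake: "Dying with reason: shutdown", a helper "Sending opaque message to confirm downstream reception", "Going to wait for reception of opaque message ack", and finally "Got close ack!". A much-reduced sketch of that confirm-before-dying pattern (illustrative only; the process names and messages here are invented, not ns_server's):

%% Push an opaque marker downstream and wait for it to come back before dying,
%% proving everything sent ahead of it was received.
-module(opaque_ack_sketch).
-export([demo/0]).

demo() ->
    Downstream = spawn(fun downstream/0),
    Opaque = make_ref(),
    Downstream ! {opaque, self(), Opaque},        %% "Sending opaque message ..."
    receive
        {opaque_ack, Opaque} -> got_close_ack     %% "Got close ack!"
    after 5000 -> timeout
    end.

downstream() ->
    receive {opaque, From, Opaque} -> From ! {opaque_ack, Opaque} end.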
[ns_server:debug,2014-08-19T16:52:30.391,ns_1@10.242.238.90:<0.1084.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.392,ns_1@10.242.238.90:<0.1084.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.392,ns_1@10.242.238.90:<0.32703.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:30.399,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.404,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5309 us [ns_server:debug,2014-08-19T16:52:30.404,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.405,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.406,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{236, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:30.452,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.453,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.453,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1152 us [ns_server:debug,2014-08-19T16:52:30.454,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.454,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{235, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:30.455,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 89 state to replica [ns_server:info,2014-08-19T16:52:30.456,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VY" ("Y", []) 
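In the "Going to change replication from 'ns_1@10.242.238.88' to have "VY" ("Y", [])" entry above, the quoted values are not text: an Erlang string is a list of character codes, so "VY" is the vbucket list [86,89] and the added "Y" is [89], which is why the subsequent filter change targets exactly those vbuckets. This is ordinary Erlang term printing and can be checked in any shell:

1> "VY" =:= [86,89].
true
2> "VXY[" =:= [86,88,89,91].
true
3> [{vb, VB} || VB <- "VXY["].
[{vb,86},{vb,88},{vb,89},{vb,91}]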
[ns_server:debug,2014-08-19T16:52:30.457,ns_1@10.242.238.90:<0.1088.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VY",'ns_1@10.242.238.88'}, #Ref<0.0.1.131503>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VY"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:30.457,ns_1@10.242.238.90:<0.1088.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.46.1> [ns_server:info,2014-08-19T16:52:30.457,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:30.481,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1},{89,1}] [ns_server:info,2014-08-19T16:52:30.481,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [rebalance:debug,2014-08-19T16:52:30.482,ns_1@10.242.238.90:<0.32690.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:30.482,ns_1@10.242.238.90:<0.32695.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:30.482,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:30.482,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:30.482,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:30.482,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:30.482,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.482,ns_1@10.242.238.90:<0.1090.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.483,ns_1@10.242.238.90:<0.1090.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.483,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:30.483,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:30.483,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:30.483,ns_1@10.242.238.90:<0.46.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:52:30.483,ns_1@10.242.238.90:<0.1088.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.46.1> [ns_server:debug,2014-08-19T16:52:30.483,ns_1@10.242.238.90:<0.1088.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:30.484,ns_1@10.242.238.90:<0.1092.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:30.484,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.46.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.48.1>, <<"cut off">>,<<"cut off">>,[],7,false,false,0, {1408,452750,482358}, completed, {<0.1088.1>,#Ref<0.0.1.131516>}, <<"replication_ns_1@10.242.238.90">>,<0.46.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:30.484,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1088.1>,{#Ref<0.0.1.131505>,<0.1092.1>}} [ns_server:debug,2014-08-19T16:52:30.484,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32690.0> (ok) [error_logger:info,2014-08-19T16:52:30.484,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1092.1>}, {name,{new_child_id,"VY",'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VY"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:30.484,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32695.0> (ok) [rebalance:debug,2014-08-19T16:52:30.485,ns_1@10.242.238.90:<0.32674.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.486,ns_1@10.242.238.90:<0.32674.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.486,ns_1@10.242.238.90:<0.1093.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.486,ns_1@10.242.238.90:<0.1093.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.486,ns_1@10.242.238.90:<0.32674.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:52:30.487,ns_1@10.242.238.90:<0.32693.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.487,ns_1@10.242.238.90:<0.32693.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.487,ns_1@10.242.238.90:<0.1094.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.487,ns_1@10.242.238.90:<0.1094.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.488,ns_1@10.242.238.90:<0.32693.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:30.490,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.493,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.493,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3112 us [ns_server:debug,2014-08-19T16:52:30.493,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.494,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{89, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:30.495,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 88 state to replica [ns_server:info,2014-08-19T16:52:30.495,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXY" ("X", []) [ns_server:debug,2014-08-19T16:52:30.496,ns_1@10.242.238.90:<0.1096.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXY",'ns_1@10.242.238.88'}, #Ref<0.0.1.131690>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXY"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:30.496,ns_1@10.242.238.90:<0.1096.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1092.1> [ns_server:debug,2014-08-19T16:52:30.507,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VY"},{name,<<"replication_ns_1@10.242.238.90">>},{takeover,false}] [rebalance:debug,2014-08-19T16:52:30.508,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1098.1> [ns_server:info,2014-08-19T16:52:30.508,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:handle_call:270]Starting new-style 
vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:30.529,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1},{88,1},{89,1}] [ns_server:info,2014-08-19T16:52:30.529,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:30.530,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:30.530,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:30.530,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:30.530,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:30.530,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.530,ns_1@10.242.238.90:<0.1099.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.530,ns_1@10.242.238.90:<0.1099.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.531,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:30.531,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:30.531,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:30.531,ns_1@10.242.238.90:<0.1092.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:30.531,ns_1@10.242.238.90:<0.1096.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1092.1> [ns_server:debug,2014-08-19T16:52:30.531,ns_1@10.242.238.90:<0.1096.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:30.532,ns_1@10.242.238.90:<0.1101.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:30.532,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1092.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1098.1>, <<"cut off">>,<<"cut off">>,[],10,false,false,0, {1408,452750,530234}, completed, {<0.1096.1>,#Ref<0.0.1.131703>}, <<"replication_ns_1@10.242.238.90">>,<0.1092.1>, {had_backfill,false,undefined,[]}, completed,false}. 
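The sequence just logged is a process handoff: register the new child id, link to the old ebucketmigrator, change the tap filter, silence the upstream sender, confirm downstream reception, hand the old state to the caller ("Sent out state. Preparing to die"), then start the replacement child, which fetches that state via old_state_retriever. A much-reduced sketch of the handoff step itself (illustrative only; the module, the messages and the map-shaped state are invented, not ns_server's):

%% The old worker hands its state to whoever asks, then exits; the replacement
%% obtains it through a retriever fun during its own startup.
-module(handoff_sketch).
-export([demo/0]).

demo() ->
    Old = spawn(fun() -> old_worker(#{vbuckets => "VY"}) end),
    Retriever = fun() ->
                        Old ! {hand_over, self()},
                        receive {old_state, Old, State} -> State end
                end,
    new_worker(Retriever).

old_worker(State) ->
    receive
        {hand_over, Caller} ->
            Caller ! {old_state, self(), State},
            exit(shutdown)                         %% "Preparing to die"
    end.

new_worker(Retriever) ->
    OldState = Retriever(),                        %% "Got old ebucketmigrator state ..."
    {ok, OldState#{vbuckets := "VXY"}}.            %% continue with the widened filter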
[ns_server:debug,2014-08-19T16:52:30.532,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1096.1>,{#Ref<0.0.1.131692>,<0.1101.1>}} [error_logger:info,2014-08-19T16:52:30.532,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1101.1>}, {name,{new_child_id,"VXY",'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXY"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:30.537,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.544,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 6980 us [ns_server:debug,2014-08-19T16:52:30.544,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.545,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.545,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{88, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:30.558,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXY"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:30.558,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1103.1> [rebalance:debug,2014-08-19T16:52:30.574,ns_1@10.242.238.90:<0.32671.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:30.574,ns_1@10.242.238.90:<0.32666.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:30.574,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32671.0> (ok) [ns_server:debug,2014-08-19T16:52:30.574,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32666.0> (ok) [rebalance:debug,2014-08-19T16:52:30.575,ns_1@10.242.238.90:<0.32669.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.576,ns_1@10.242.238.90:<0.32669.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.576,ns_1@10.242.238.90:<0.1104.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception 
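The PROGRESS REPORT above shows 'ns_vbm_new_sup-default' starting the replacement ebucketmigrator_srv as a temporary worker with a 60000 ms shutdown. For reference, a child specification carrying those restart/shutdown values in the modern map form (illustrative only; the start arguments are elided behind a parameter and the function name is invented):

%% Child spec mirroring the values in the PROGRESS REPORT above.
-module(child_spec_sketch).
-export([child_spec/1]).

child_spec(StartArgs) ->
    #{id       => {new_child_id, "VXY", 'ns_1@10.242.238.88'},
      start    => {ebucketmigrator_srv, start_link, StartArgs},
      restart  => temporary,      %% {restart_type,temporary}
      shutdown => 60000,          %% {shutdown,60000}
      type     => worker}.        %% {child_type,worker}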
[ns_server:debug,2014-08-19T16:52:30.576,ns_1@10.242.238.90:<0.1104.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.576,ns_1@10.242.238.90:<0.32669.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:30.577,ns_1@10.242.238.90:<0.32664.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.577,ns_1@10.242.238.90:<0.32664.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.577,ns_1@10.242.238.90:<0.1105.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.577,ns_1@10.242.238.90:<0.1105.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.577,ns_1@10.242.238.90:<0.32664.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:30.584,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.587,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.587,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3409 us [ns_server:debug,2014-08-19T16:52:30.588,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.588,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{238, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:30.631,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.634,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.635,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2989 us [ns_server:debug,2014-08-19T16:52:30.635,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.635,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{237, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, 
{servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:30.666,ns_1@10.242.238.90:<0.32661.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:30.666,ns_1@10.242.238.90:<0.32656.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:30.666,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32661.0> (ok) [ns_server:debug,2014-08-19T16:52:30.666,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32656.0> (ok) [rebalance:debug,2014-08-19T16:52:30.667,ns_1@10.242.238.90:<0.32654.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.668,ns_1@10.242.238.90:<0.32654.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.668,ns_1@10.242.238.90:<0.1108.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:52:30.668,ns_1@10.242.238.90:<0.32659.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.668,ns_1@10.242.238.90:<0.1108.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.668,ns_1@10.242.238.90:<0.32654.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:30.668,ns_1@10.242.238.90:<0.32659.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.668,ns_1@10.242.238.90:<0.1109.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.668,ns_1@10.242.238.90:<0.1109.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.668,ns_1@10.242.238.90:<0.32659.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:30.681,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.684,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.685,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.685,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3491 us [ns_server:debug,2014-08-19T16:52:30.685,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{240, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:30.735,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.738,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.739,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3315 us [ns_server:debug,2014-08-19T16:52:30.739,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.739,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{239, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:30.741,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 91 state to replica [ns_server:info,2014-08-19T16:52:30.741,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXY[" ("[", []) [ns_server:debug,2014-08-19T16:52:30.742,ns_1@10.242.238.90:<0.1112.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXY[",'ns_1@10.242.238.88'}, #Ref<0.0.1.132100>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXY["}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] 
[ns_server:debug,2014-08-19T16:52:30.743,ns_1@10.242.238.90:<0.1112.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1101.1> [ns_server:info,2014-08-19T16:52:30.743,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:52:30.750,ns_1@10.242.238.90:<0.32651.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:30.750,ns_1@10.242.238.90:<0.32646.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:30.764,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1},{88,1},{89,1},{91,1}] [ns_server:info,2014-08-19T16:52:30.765,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:30.766,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:30.766,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:30.766,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:30.766,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:30.766,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.766,ns_1@10.242.238.90:<0.1114.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.766,ns_1@10.242.238.90:<0.1114.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.767,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:30.767,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:30.767,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:30.767,ns_1@10.242.238.90:<0.1101.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:30.767,ns_1@10.242.238.90:<0.1112.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1101.1> [ns_server:debug,2014-08-19T16:52:30.767,ns_1@10.242.238.90:<0.1112.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:30.767,ns_1@10.242.238.90:<0.1116.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:30.767,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1101.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1103.1>, <<"cut off">>,<<"cut off">>,[],13,false,false,0, {1408,452750,766127}, completed, {<0.1112.1>,#Ref<0.0.1.132113>}, <<"replication_ns_1@10.242.238.90">>,<0.1101.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:30.768,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1112.1>,{#Ref<0.0.1.132102>,<0.1116.1>}} [error_logger:info,2014-08-19T16:52:30.768,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1116.1>}, {name,{new_child_id,"VXY[",'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXY["}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:30.768,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32651.0> (ok) [ns_server:debug,2014-08-19T16:52:30.768,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32646.0> (ok) [rebalance:debug,2014-08-19T16:52:30.769,ns_1@10.242.238.90:<0.32649.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.769,ns_1@10.242.238.90:<0.32649.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.770,ns_1@10.242.238.90:<0.1117.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.770,ns_1@10.242.238.90:<0.1117.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.770,ns_1@10.242.238.90:<0.32649.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:30.771,ns_1@10.242.238.90:<0.32630.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.771,ns_1@10.242.238.90:<0.32630.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.771,ns_1@10.242.238.90:<0.1118.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.771,ns_1@10.242.238.90:<0.1118.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.772,ns_1@10.242.238.90:<0.32630.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
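The short strings such as "VXY[" in the tap_replication_manager and ns_vbm_new_sup entries above are compact vbucket lists: each character's code point is a vbucket ID, so "VXY[" is vbuckets 86, 88, 89 and 91, matching the filter [{86,1},{88,1},{89,1},{91,1}] applied to tap stream `replication_ns_1@10.242.238.90` a few entries earlier. A minimal decoding sketch (Python; the helper name is illustrative, not part of ns_server):

    # Decode an ns_server "vbuckets" string into numeric vbucket IDs.
    # Each character's code point is one vbucket number, e.g. "VXY[" -> [86, 88, 89, 91].
    def decode_vbuckets(vbucket_str):
        return [ord(ch) for ch in vbucket_str]

    assert decode_vbuckets("VXY[") == [86, 88, 89, 91]
    assert decode_vbuckets("VXYZ[\\]^_`") == [86, 88, 89, 90, 91, 92, 93, 94, 95, 96]

Read this way, the successive filter changes in this section add replica vbuckets 90 through 96 to the existing stream from 'ns_1@10.242.238.88', one per "Changed vbucket N state to replica" call.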
[ns_server:debug,2014-08-19T16:52:30.774,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.776,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.777,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3220 us [ns_server:debug,2014-08-19T16:52:30.777,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.777,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{91, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:30.779,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 90 state to replica [ns_server:info,2014-08-19T16:52:30.779,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[" ("Z", []) [ns_server:debug,2014-08-19T16:52:30.780,ns_1@10.242.238.90:<0.1120.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[",'ns_1@10.242.238.88'}, #Ref<0.0.1.132281>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ["}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:30.780,ns_1@10.242.238.90:<0.1120.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1116.1> [ns_server:debug,2014-08-19T16:52:30.793,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXY["}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:30.794,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1122.1> [ns_server:info,2014-08-19T16:52:30.794,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:30.814,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1},{88,1},{89,1},{90,1},{91,1}] [ns_server:info,2014-08-19T16:52:30.814,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:30.815,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:52:30.815,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:30.815,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:30.815,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:30.815,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.815,ns_1@10.242.238.90:<0.1123.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.816,ns_1@10.242.238.90:<0.1123.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.816,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:30.816,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:30.816,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:30.816,ns_1@10.242.238.90:<0.1116.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:30.816,ns_1@10.242.238.90:<0.1120.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1116.1> [ns_server:debug,2014-08-19T16:52:30.816,ns_1@10.242.238.90:<0.1120.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:30.817,ns_1@10.242.238.90:<0.1125.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:30.817,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1116.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1122.1>, <<"cut off">>,<<"cut off">>,[],16,false,false,0, {1408,452750,815352}, completed, {<0.1120.1>,#Ref<0.0.1.132294>}, <<"replication_ns_1@10.242.238.90">>,<0.1116.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:30.817,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1120.1>,{#Ref<0.0.1.132283>,<0.1125.1>}} [error_logger:info,2014-08-19T16:52:30.817,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1125.1>}, {name,{new_child_id,"VXYZ[",'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ["}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:30.818,ns_1@10.242.238.90:<0.32627.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:30.818,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32627.0> (ok) [rebalance:debug,2014-08-19T16:52:30.819,ns_1@10.242.238.90:<0.32622.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:30.820,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32622.0> (ok) [rebalance:debug,2014-08-19T16:52:30.821,ns_1@10.242.238.90:<0.32625.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.821,ns_1@10.242.238.90:<0.32625.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.821,ns_1@10.242.238.90:<0.1126.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.821,ns_1@10.242.238.90:<0.1126.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:30.821,ns_1@10.242.238.90:<0.32620.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.822,ns_1@10.242.238.90:<0.32620.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [rebalance:info,2014-08-19T16:52:30.822,ns_1@10.242.238.90:<0.32625.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:30.822,ns_1@10.242.238.90:<0.1127.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.822,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.822,ns_1@10.242.238.90:<0.1127.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.822,ns_1@10.242.238.90:<0.32620.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
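When skimming a rebalance log like this, the entries that summarise progress are the ns_memcached state transitions ("Changed vbucket 91 state to replica") and the per-vbucket map updates in the "config change: buckets" reports. A rough way to pull the state transitions out of a captured log (the regex is an illustrative sketch, not an official ns_server log schema):

    import re

    # Matches the ns_memcached entries above, e.g. "Changed vbucket 91 state to replica".
    STATE_RE = re.compile(r"Changed vbucket (\d+) state to (\w+)")

    def vbucket_state_changes(log_text):
        """Yield (vbucket_id, new_state) pairs in the order the entries appear."""
        for m in STATE_RE.finditer(log_text):
            yield int(m.group(1)), m.group(2)

    sample = "ns_memcached:do_handle_call:527]Changed vbucket 90 state to replica"
    assert list(vbucket_state_changes(sample)) == [(90, "replica")]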
[ns_server:debug,2014-08-19T16:52:30.826,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.826,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4197 us [ns_server:debug,2014-08-19T16:52:30.827,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.827,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{90, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:30.839,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ["}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:30.840,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1129.1> [ns_server:debug,2014-08-19T16:52:30.867,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.870,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.870,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3280 us [ns_server:debug,2014-08-19T16:52:30.871,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.871,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{242, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:30.885,ns_1@10.242.238.90:<0.32617.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:30.885,ns_1@10.242.238.90:<0.32612.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:30.885,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32617.0> (ok) [ns_server:debug,2014-08-19T16:52:30.885,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32612.0> (ok) [rebalance:debug,2014-08-19T16:52:30.886,ns_1@10.242.238.90:<0.32615.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown 
[ns_server:debug,2014-08-19T16:52:30.887,ns_1@10.242.238.90:<0.32615.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.887,ns_1@10.242.238.90:<0.1131.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.887,ns_1@10.242.238.90:<0.1131.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:30.887,ns_1@10.242.238.90:<0.32610.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:52:30.887,ns_1@10.242.238.90:<0.32615.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:30.887,ns_1@10.242.238.90:<0.32610.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.887,ns_1@10.242.238.90:<0.1132.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.887,ns_1@10.242.238.90:<0.1132.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.888,ns_1@10.242.238.90:<0.32610.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:30.922,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.925,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.925,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3312 us [ns_server:debug,2014-08-19T16:52:30.926,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.926,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{241, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:30.926,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 92 state to replica [ns_server:info,2014-08-19T16:52:30.927,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\" ("\\", []) [ns_server:debug,2014-08-19T16:52:30.928,ns_1@10.242.238.90:<0.1134.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\",'ns_1@10.242.238.88'}, #Ref<0.0.1.132625>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\"}, {set_to_pending_state,false}, {takeover,false}, 
{suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:30.928,ns_1@10.242.238.90:<0.1134.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1125.1> [ns_server:info,2014-08-19T16:52:30.928,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:30.949,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1},{88,1},{89,1},{90,1},{91,1},{92,1}] [ns_server:info,2014-08-19T16:52:30.950,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:30.950,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:30.951,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:30.951,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:30.951,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:30.951,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.951,ns_1@10.242.238.90:<0.1136.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.952,ns_1@10.242.238.90:<0.1136.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.952,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:30.952,ns_1@10.242.238.90:<0.32606.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:30.952,ns_1@10.242.238.90:<0.32601.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:30.953,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:30.953,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:30.953,ns_1@10.242.238.90:<0.1125.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:30.953,ns_1@10.242.238.90:<0.1134.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1125.1> [ns_server:debug,2014-08-19T16:52:30.954,ns_1@10.242.238.90:<0.1134.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:30.954,ns_1@10.242.238.90:<0.1138.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:30.954,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1125.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1129.1>, <<"cut off">>,<<"cut off">>,[],19,false,false,0, {1408,452750,950922}, completed, {<0.1134.1>,#Ref<0.0.1.132638>}, <<"replication_ns_1@10.242.238.90">>,<0.1125.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:30.954,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1134.1>,{#Ref<0.0.1.132627>,<0.1138.1>}} [ns_server:debug,2014-08-19T16:52:30.954,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32606.0> (ok) [ns_server:debug,2014-08-19T16:52:30.954,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32601.0> (ok) [error_logger:info,2014-08-19T16:52:30.954,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1138.1>}, {name,{new_child_id,"VXYZ[\\",'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:30.956,ns_1@10.242.238.90:<0.32585.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.956,ns_1@10.242.238.90:<0.32585.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.956,ns_1@10.242.238.90:<0.1139.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.956,ns_1@10.242.238.90:<0.1139.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.956,ns_1@10.242.238.90:<0.32585.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:30.957,ns_1@10.242.238.90:<0.32604.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:30.958,ns_1@10.242.238.90:<0.32604.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.958,ns_1@10.242.238.90:<0.1140.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.958,ns_1@10.242.238.90:<0.1140.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.958,ns_1@10.242.238.90:<0.32604.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:30.960,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:30.963,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.963,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3283 us [ns_server:debug,2014-08-19T16:52:30.964,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:30.964,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{92, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:30.965,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 93 state to replica [ns_server:info,2014-08-19T16:52:30.965,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]" ("]", []) [ns_server:debug,2014-08-19T16:52:30.966,ns_1@10.242.238.90:<0.1142.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]",'ns_1@10.242.238.88'}, #Ref<0.0.1.132805>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:30.966,ns_1@10.242.238.90:<0.1142.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1138.1> [ns_server:debug,2014-08-19T16:52:30.976,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:30.976,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1144.1> [ns_server:info,2014-08-19T16:52:30.977,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:30.996,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1},{88,1},{89,1},{90,1},{91,1},{92,1},{93,1}] [ns_server:info,2014-08-19T16:52:30.997,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:30.998,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:52:30.998,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:30.998,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:30.998,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:30.998,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:30.998,ns_1@10.242.238.90:<0.1145.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:30.998,ns_1@10.242.238.90:<0.1145.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:30.998,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:30.999,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:30.999,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:30.999,ns_1@10.242.238.90:<0.1138.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:30.999,ns_1@10.242.238.90:<0.1142.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1138.1> [ns_server:debug,2014-08-19T16:52:30.999,ns_1@10.242.238.90:<0.1142.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:30.999,ns_1@10.242.238.90:<0.1147.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:31.000,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1138.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1144.1>, <<"cut off">>,<<"cut off">>,[],22,false,false,0, {1408,452750,998000}, completed, {<0.1142.1>,#Ref<0.0.1.132818>}, <<"replication_ns_1@10.242.238.90">>,<0.1138.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:31.000,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1142.1>,{#Ref<0.0.1.132807>,<0.1147.1>}} [error_logger:info,2014-08-19T16:52:31.000,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1147.1>}, {name,{new_child_id,"VXYZ[\\]",'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:31.005,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.008,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.008,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3455 us [ns_server:debug,2014-08-19T16:52:31.009,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.009,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{93, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:31.021,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:31.021,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1149.1> [rebalance:debug,2014-08-19T16:52:31.029,ns_1@10.242.238.90:<0.32582.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:31.029,ns_1@10.242.238.90:<0.32577.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:31.029,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32582.0> (ok) [ns_server:debug,2014-08-19T16:52:31.030,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32577.0> (ok) [rebalance:debug,2014-08-19T16:52:31.031,ns_1@10.242.238.90:<0.32580.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:31.031,ns_1@10.242.238.90:<0.32580.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.031,ns_1@10.242.238.90:<0.1150.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception 
[rebalance:debug,2014-08-19T16:52:31.031,ns_1@10.242.238.90:<0.32575.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:31.031,ns_1@10.242.238.90:<0.1150.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:31.032,ns_1@10.242.238.90:<0.32575.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.032,ns_1@10.242.238.90:<0.1151.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:52:31.032,ns_1@10.242.238.90:<0.32580.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.032,ns_1@10.242.238.90:<0.1151.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.032,ns_1@10.242.238.90:<0.32575.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.052,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.055,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.055,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3242 us [ns_server:debug,2014-08-19T16:52:31.056,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{243, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:31.056,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.102,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.106,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.106,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3801 us [ns_server:debug,2014-08-19T16:52:31.106,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.107,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{244, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, 
{servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:31.112,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 95 state to replica [ns_server:info,2014-08-19T16:52:31.113,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]_" ("_", []) [ns_server:debug,2014-08-19T16:52:31.114,ns_1@10.242.238.90:<0.1154.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]_",'ns_1@10.242.238.88'}, #Ref<0.0.1.133102>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]_"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:31.114,ns_1@10.242.238.90:<0.1154.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1147.1> [ns_server:info,2014-08-19T16:52:31.114,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:31.134,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1},{88,1},{89,1},{90,1},{91,1},{92,1},{93,1},{95,1}] [ns_server:info,2014-08-19T16:52:31.135,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:31.135,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:31.135,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:31.135,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:31.135,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:31.136,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.136,ns_1@10.242.238.90:<0.1156.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.136,ns_1@10.242.238.90:<0.1156.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.136,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:31.136,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:31.136,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:31.136,ns_1@10.242.238.90:<0.1147.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:31.136,ns_1@10.242.238.90:<0.1154.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1147.1> [ns_server:debug,2014-08-19T16:52:31.137,ns_1@10.242.238.90:<0.1154.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:31.137,ns_1@10.242.238.90:<0.1158.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:31.137,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1147.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1149.1>, <<"cut off">>,<<"cut off">>,[],25,false,false,0, {1408,452751,135511}, completed, {<0.1154.1>,#Ref<0.0.1.133115>}, <<"replication_ns_1@10.242.238.90">>,<0.1147.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:31.137,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1154.1>,{#Ref<0.0.1.133104>,<0.1158.1>}} [error_logger:info,2014-08-19T16:52:31.137,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1158.1>}, {name,{new_child_id,"VXYZ[\\]_",'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]_"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:31.142,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.145,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.146,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3442 us [ns_server:debug,2014-08-19T16:52:31.146,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.146,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{95, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, 
{map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:31.147,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 94 state to replica [ns_server:info,2014-08-19T16:52:31.148,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_" ("^", []) [ns_server:debug,2014-08-19T16:52:31.149,ns_1@10.242.238.90:<0.1160.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_",'ns_1@10.242.238.88'}, #Ref<0.0.1.133228>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:31.149,ns_1@10.242.238.90:<0.1160.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1158.1> [rebalance:debug,2014-08-19T16:52:31.155,ns_1@10.242.238.90:<0.32572.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:31.155,ns_1@10.242.238.90:<0.32567.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:31.163,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]_"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:31.163,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1162.1> [ns_server:info,2014-08-19T16:52:31.163,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:31.183,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1},{88,1},{89,1},{90,1},{91,1},{92,1},{93,1},{94,1},{95,1}] [ns_server:info,2014-08-19T16:52:31.184,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:31.184,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:31.184,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:52:31.184,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:31.184,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:31.185,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.185,ns_1@10.242.238.90:<0.1163.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.185,ns_1@10.242.238.90:<0.1163.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.185,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.185,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:31.185,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:31.185,ns_1@10.242.238.90:<0.1158.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:31.185,ns_1@10.242.238.90:<0.1160.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1158.1> [ns_server:debug,2014-08-19T16:52:31.186,ns_1@10.242.238.90:<0.1160.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:31.186,ns_1@10.242.238.90:<0.1165.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:31.186,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1158.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1162.1>, <<"cut off">>,<<"cut off">>,[],28,false,false,0, {1408,452751,184644}, completed, {<0.1160.1>,#Ref<0.0.1.133241>}, <<"replication_ns_1@10.242.238.90">>,<0.1158.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:31.186,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1160.1>,{#Ref<0.0.1.133230>,<0.1165.1>}} [ns_server:debug,2014-08-19T16:52:31.186,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32572.0> (ok) [error_logger:info,2014-08-19T16:52:31.186,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1165.1>}, {name,{new_child_id,"VXYZ[\\]^_",'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:31.186,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32567.0> (ok) [rebalance:debug,2014-08-19T16:52:31.188,ns_1@10.242.238.90:<0.32570.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:31.188,ns_1@10.242.238.90:<0.32570.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.188,ns_1@10.242.238.90:<0.1166.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.188,ns_1@10.242.238.90:<0.1166.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.188,ns_1@10.242.238.90:<0.32570.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:31.189,ns_1@10.242.238.90:<0.32565.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:31.190,ns_1@10.242.238.90:<0.32565.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.190,ns_1@10.242.238.90:<0.1167.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.190,ns_1@10.242.238.90:<0.1167.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.190,ns_1@10.242.238.90:<0.32565.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:31.193,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.194,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.195,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1550 us [ns_server:debug,2014-08-19T16:52:31.195,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.195,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{94, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:31.208,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:31.209,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1169.1> [ns_server:debug,2014-08-19T16:52:31.231,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.235,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.235,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3741 us [ns_server:debug,2014-08-19T16:52:31.236,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.236,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{246, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:31.277,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.281,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.281,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3428 us 
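The ns_config_rep entries in this stretch also record how long each full synchronization requested by 'ns_1@10.242.238.88' took, roughly 1.5-4.6 ms here. If those figures need to be tracked over a longer rebalance, the same kind of throwaway parsing works (again an illustrative sketch, not ns_server tooling):

    import re

    # Pulls the microsecond figures from entries such as
    # "Fully synchronized config in 3428 us".
    SYNC_RE = re.compile(r"Fully synchronized config in (\d+) us")

    def config_sync_latencies_us(log_text):
        return [int(us) for us in SYNC_RE.findall(log_text)]

    assert config_sync_latencies_us("... Fully synchronized config in 3428 us ...") == [3428]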
[ns_server:debug,2014-08-19T16:52:31.282,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.282,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{245, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:31.297,ns_1@10.242.238.90:<0.32562.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:31.297,ns_1@10.242.238.90:<0.32557.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:31.297,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32562.0> (ok) [ns_server:debug,2014-08-19T16:52:31.298,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32557.0> (ok) [rebalance:debug,2014-08-19T16:52:31.299,ns_1@10.242.238.90:<0.32547.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:31.299,ns_1@10.242.238.90:<0.32547.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.299,ns_1@10.242.238.90:<0.1172.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.299,ns_1@10.242.238.90:<0.1172.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:31.300,ns_1@10.242.238.90:<0.32560.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:52:31.300,ns_1@10.242.238.90:<0.32547.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.300,ns_1@10.242.238.90:<0.32560.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.300,ns_1@10.242.238.90:<0.1173.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.300,ns_1@10.242.238.90:<0.1173.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.300,ns_1@10.242.238.90:<0.32560.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:31.324,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.327,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.328,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3069 us [ns_server:debug,2014-08-19T16:52:31.328,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.328,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{247, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:31.375,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.380,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.380,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4628 us [ns_server:debug,2014-08-19T16:52:31.380,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.381,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{248, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:31.385,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 96 state to replica [ns_server:info,2014-08-19T16:52:31.386,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`" ("`", []) [ns_server:debug,2014-08-19T16:52:31.387,ns_1@10.242.238.90:<0.1182.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`",'ns_1@10.242.238.88'}, #Ref<0.0.1.133626>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] 
[ns_server:debug,2014-08-19T16:52:31.387,ns_1@10.242.238.90:<0.1182.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1165.1> [ns_server:info,2014-08-19T16:52:31.387,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:31.408,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1},{88,1},{89,1},{90,1},{91,1},{92,1},{93,1},{94,1},{95,1},{96,1}] [ns_server:info,2014-08-19T16:52:31.409,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:31.410,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:31.410,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:31.410,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:31.410,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:31.410,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.410,ns_1@10.242.238.90:<0.1184.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.410,ns_1@10.242.238.90:<0.1184.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.410,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.411,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:31.411,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:31.411,ns_1@10.242.238.90:<0.1165.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:31.411,ns_1@10.242.238.90:<0.1182.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1165.1> [ns_server:debug,2014-08-19T16:52:31.411,ns_1@10.242.238.90:<0.1182.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:31.411,ns_1@10.242.238.90:<0.1186.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:31.411,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1165.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1169.1>, <<"cut off">>,<<"cut off">>,[],31,false,false,0, {1408,452751,410097}, completed, {<0.1182.1>,#Ref<0.0.1.133639>}, <<"replication_ns_1@10.242.238.90">>,<0.1165.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:31.412,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1182.1>,{#Ref<0.0.1.133628>,<0.1186.1>}} [error_logger:info,2014-08-19T16:52:31.412,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1186.1>}, {name, {new_child_id,"VXYZ[\\]^_`",'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:31.417,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.420,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.420,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3530 us [ns_server:debug,2014-08-19T16:52:31.421,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.421,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{96, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:31.433,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:31.433,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1188.1> [ns_server:info,2014-08-19T16:52:31.445,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 97 state to replica [ns_server:info,2014-08-19T16:52:31.446,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`a" ("a", []) [ns_server:debug,2014-08-19T16:52:31.447,ns_1@10.242.238.90:<0.1189.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`a",'ns_1@10.242.238.88'}, #Ref<0.0.1.133774>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`a"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:31.447,ns_1@10.242.238.90:<0.1189.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old 
ebucketmigrator <0.1186.1> [ns_server:info,2014-08-19T16:52:31.447,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:31.467,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1},{88,1},{89,1},{90,1},{91,1},{92,1},{93,1},{94,1},{95,1},{96,1},{97,1}] [ns_server:info,2014-08-19T16:52:31.467,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:31.468,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:31.468,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:31.469,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:31.469,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:31.469,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.469,ns_1@10.242.238.90:<0.1191.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.469,ns_1@10.242.238.90:<0.1191.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.469,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.469,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:31.469,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:31.469,ns_1@10.242.238.90:<0.1186.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:31.470,ns_1@10.242.238.90:<0.1189.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1186.1> [ns_server:debug,2014-08-19T16:52:31.470,ns_1@10.242.238.90:<0.1189.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:31.470,ns_1@10.242.238.90:<0.1193.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:31.470,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1186.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1188.1>, <<"cut off">>,<<"cut off">>,[],34,false,false,0, {1408,452751,468661}, completed, {<0.1189.1>,#Ref<0.0.1.133787>}, <<"replication_ns_1@10.242.238.90">>,<0.1186.1>, {had_backfill,false,undefined,[]}, completed,false}. 
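The records above repeat one pattern for every vbucket this node picks up as a replica: a helper registers under a new child id, links itself to the old ebucketmigrator, has it change the vbucket filter on the live tap stream, silences the upstream sender, and then takes the old state so that a replacement ebucketmigrator can reuse the existing upstream connection ("Reusing old upstream") instead of reconnecting. A minimal sketch of that handoff follows; the module, function and message names are invented for illustration and are not the real ebucketmigrator_srv / ns_vbm_new_sup API.

    %% Sketch only -- names are illustrative, not ns_server's actual code.
    -module(vb_filter_change_sketch).
    -export([demo/0]).

    %% Old replicator: owns the upstream state and waits to hand it over
    %% ("Passed old state to caller" / "Sent out state. Preparing to die").
    old_replicator(State) ->
        receive
            {hand_over_state, From} ->
                From ! {old_state, State}
        end.

    %% Replacement replicator: starts from the old state instead of
    %% reconnecting, with one more vbucket in its filter.
    new_replicator(OldState, NewVBucket) ->
        VBuckets = maps:get(vbuckets, OldState) ++ [NewVBucket],
        io:format("reusing upstream ~s, filter is now ~w~n",
                  [maps:get(name, OldState), VBuckets]).

    demo() ->
        Old = spawn(fun() ->
                        old_replicator(#{name => "replication_ns_1@10.242.238.90",
                                         vbuckets => [86 | lists:seq(88, 96)]})
                    end),
        Old ! {hand_over_state, self()},
        receive
            {old_state, S} -> new_replicator(S, 97)
        end.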
[ns_server:debug,2014-08-19T16:52:31.470,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1189.1>,{#Ref<0.0.1.133776>,<0.1193.1>}} [error_logger:info,2014-08-19T16:52:31.470,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1193.1>}, {name, {new_child_id,"VXYZ[\\]^_`a",'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`a"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:31.477,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.479,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.480,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2989 us [ns_server:debug,2014-08-19T16:52:31.480,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.481,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{97, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:31.481,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 98 state to replica [ns_server:info,2014-08-19T16:52:31.481,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`ab" ("b", []) [ns_server:debug,2014-08-19T16:52:31.482,ns_1@10.242.238.90:<0.1195.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`ab", 'ns_1@10.242.238.88'}, #Ref<0.0.1.133901>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`ab"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:31.483,ns_1@10.242.238.90:<0.1195.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1193.1> [ns_server:debug,2014-08-19T16:52:31.492,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`a"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] 
[rebalance:debug,2014-08-19T16:52:31.492,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1197.1> [ns_server:info,2014-08-19T16:52:31.492,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:31.512,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}] [ns_server:info,2014-08-19T16:52:31.513,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:31.513,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:31.514,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:31.514,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:31.514,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:31.514,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.514,ns_1@10.242.238.90:<0.1198.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.514,ns_1@10.242.238.90:<0.1198.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:31.514,ns_1@10.242.238.90:<0.32533.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:31.514,ns_1@10.242.238.90:<0.32538.0>:janitor_agent:handle_call:795]Done [rebalance:info,2014-08-19T16:52:31.514,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.514,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:31.515,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:31.515,ns_1@10.242.238.90:<0.1193.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:31.515,ns_1@10.242.238.90:<0.1195.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1193.1> [ns_server:debug,2014-08-19T16:52:31.515,ns_1@10.242.238.90:<0.1195.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:31.515,ns_1@10.242.238.90:<0.1200.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:31.515,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1193.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1197.1>, <<"cut off">>,<<"cut off">>,[],37,false,false,0, {1408,452751,513844}, completed, {<0.1195.1>,#Ref<0.0.1.133914>}, <<"replication_ns_1@10.242.238.90">>,<0.1193.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:31.515,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1195.1>,{#Ref<0.0.1.133903>,<0.1200.1>}} [error_logger:info,2014-08-19T16:52:31.515,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1200.1>}, {name, {new_child_id,"VXYZ[\\]^_`ab", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`ab"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:31.516,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32533.0> (ok) [ns_server:debug,2014-08-19T16:52:31.516,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32538.0> (ok) [rebalance:debug,2014-08-19T16:52:31.517,ns_1@10.242.238.90:<0.32536.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:31.517,ns_1@10.242.238.90:<0.32536.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.518,ns_1@10.242.238.90:<0.1201.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.518,ns_1@10.242.238.90:<0.1201.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.518,ns_1@10.242.238.90:<0.32536.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:31.518,ns_1@10.242.238.90:<0.32531.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:31.518,ns_1@10.242.238.90:<0.32531.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.518,ns_1@10.242.238.90:<0.1202.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.519,ns_1@10.242.238.90:<0.1202.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.519,ns_1@10.242.238.90:<0.32531.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:31.521,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.524,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.525,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.525,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3936 us [ns_server:debug,2014-08-19T16:52:31.525,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{98, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:31.527,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 99 state to replica [ns_server:info,2014-08-19T16:52:31.527,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abc" ("c", []) [ns_server:debug,2014-08-19T16:52:31.528,ns_1@10.242.238.90:<0.1204.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abc", 'ns_1@10.242.238.88'}, #Ref<0.0.1.134090>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abc"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:31.528,ns_1@10.242.238.90:<0.1204.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1200.1> [ns_server:debug,2014-08-19T16:52:31.537,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`ab"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:31.537,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1206.1> [ns_server:info,2014-08-19T16:52:31.538,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:31.558,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}] [ns_server:info,2014-08-19T16:52:31.558,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:31.559,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:52:31.559,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:31.559,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:31.559,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:31.560,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.560,ns_1@10.242.238.90:<0.1207.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.560,ns_1@10.242.238.90:<0.1207.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.560,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.560,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:31.560,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:31.560,ns_1@10.242.238.90:<0.1200.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:31.560,ns_1@10.242.238.90:<0.1204.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1200.1> [ns_server:debug,2014-08-19T16:52:31.561,ns_1@10.242.238.90:<0.1204.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:31.561,ns_1@10.242.238.90:<0.1209.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:31.561,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1200.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1206.1>, <<"cut off">>,<<"cut off">>,[],40,false,false,0, {1408,452751,559579}, completed, {<0.1204.1>,#Ref<0.0.1.134103>}, <<"replication_ns_1@10.242.238.90">>,<0.1200.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:31.561,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1204.1>,{#Ref<0.0.1.134092>,<0.1209.1>}} [error_logger:info,2014-08-19T16:52:31.561,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1209.1>}, {name, {new_child_id,"VXYZ[\\]^_`abc", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abc"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:31.566,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.570,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.570,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3633 us [ns_server:debug,2014-08-19T16:52:31.570,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.571,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{99, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:31.572,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 100 state to replica [ns_server:info,2014-08-19T16:52:31.572,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcd" ("d", []) [ns_server:debug,2014-08-19T16:52:31.574,ns_1@10.242.238.90:<0.1211.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcd", 'ns_1@10.242.238.88'}, #Ref<0.0.1.134254>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcd"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:31.574,ns_1@10.242.238.90:<0.1211.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1209.1> [ns_server:debug,2014-08-19T16:52:31.582,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abc"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] 
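Each "config change: buckets" record above carries one updated row of the bucket's vbucket map in the form {VBucket, OldChain, NewChain}, where a chain appears to list the active node first and the replica nodes after it, with undefined marking an empty replica slot (the surrounding "Changed vbucket N state to replica" records are consistent with that reading). A small sketch of how such a row can be read; describe_map_change/1 is not an ns_server function, only an illustration.

    -module(bucket_map_sketch).
    -export([describe_map_change/1]).

    %% Sketch only: interprets one row of the map as printed in the log above,
    %% assuming the chain layout [Active | Replicas].
    describe_map_change({VBucket, [OldActive | _], [NewActive | NewReplicas]}) ->
        io:format("vbucket ~w: active ~w -> ~w, replicas now ~w~n",
                  [VBucket, OldActive, NewActive, NewReplicas]).

    %% For the vbucket 99 row above:
    %%   describe_map_change({99,
    %%                        ['ns_1@10.242.238.88', undefined],
    %%                        ['ns_1@10.242.238.88', 'ns_1@10.242.238.90']}).
    %% prints: vbucket 99: active 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.88',
    %% replicas now ['ns_1@10.242.238.90']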
[rebalance:debug,2014-08-19T16:52:31.583,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1213.1> [ns_server:info,2014-08-19T16:52:31.583,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:31.603,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}] [ns_server:info,2014-08-19T16:52:31.603,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:31.604,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:31.604,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:31.604,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:31.605,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:31.605,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.605,ns_1@10.242.238.90:<0.1214.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.605,ns_1@10.242.238.90:<0.1214.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.605,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.605,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:31.605,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:31.605,ns_1@10.242.238.90:<0.1209.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:31.605,ns_1@10.242.238.90:<0.1211.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1209.1> [ns_server:debug,2014-08-19T16:52:31.606,ns_1@10.242.238.90:<0.1211.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:31.606,ns_1@10.242.238.90:<0.1216.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:31.606,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1209.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1213.1>, <<"cut off">>,<<"cut off">>,[],43,false,false,0, {1408,452751,604610}, completed, {<0.1211.1>,#Ref<0.0.1.134267>}, <<"replication_ns_1@10.242.238.90">>,<0.1209.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:31.606,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1211.1>,{#Ref<0.0.1.134256>,<0.1216.1>}} [error_logger:info,2014-08-19T16:52:31.606,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1216.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcd", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcd"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:31.613,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.614,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.614,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1355 us [rebalance:debug,2014-08-19T16:52:31.615,ns_1@10.242.238.90:<0.32523.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:31.615,ns_1@10.242.238.90:<0.32528.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:31.615,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{100, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:31.615,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32523.0> (ok) [ns_server:debug,2014-08-19T16:52:31.615,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.615,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32528.0> (ok) [rebalance:debug,2014-08-19T16:52:31.616,ns_1@10.242.238.90:<0.32521.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:31.617,ns_1@10.242.238.90:<0.32521.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack 
[ns_server:debug,2014-08-19T16:52:31.617,ns_1@10.242.238.90:<0.1218.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.617,ns_1@10.242.238.90:<0.1218.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.617,ns_1@10.242.238.90:<0.32521.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:info,2014-08-19T16:52:31.617,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 101 state to replica [ns_server:info,2014-08-19T16:52:31.617,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcde" ("e", []) [rebalance:debug,2014-08-19T16:52:31.617,ns_1@10.242.238.90:<0.32526.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:31.618,ns_1@10.242.238.90:<0.32526.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.618,ns_1@10.242.238.90:<0.1219.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.618,ns_1@10.242.238.90:<0.1219.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.618,ns_1@10.242.238.90:<0.32526.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.619,ns_1@10.242.238.90:<0.1220.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcde", 'ns_1@10.242.238.88'}, #Ref<0.0.1.134443>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcde"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:31.619,ns_1@10.242.238.90:<0.1220.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1216.1> [ns_server:debug,2014-08-19T16:52:31.628,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcd"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:31.628,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1222.1> [ns_server:info,2014-08-19T16:52:31.628,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:31.648,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}] [ns_server:info,2014-08-19T16:52:31.649,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:31.649,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:52:31.650,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:31.650,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:31.650,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:31.650,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.650,ns_1@10.242.238.90:<0.1223.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.650,ns_1@10.242.238.90:<0.1223.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.650,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.650,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:31.650,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:31.651,ns_1@10.242.238.90:<0.1216.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:31.651,ns_1@10.242.238.90:<0.1220.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1216.1> [ns_server:debug,2014-08-19T16:52:31.651,ns_1@10.242.238.90:<0.1220.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:31.651,ns_1@10.242.238.90:<0.1225.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:31.651,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1216.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1222.1>, <<"cut off">>,<<"cut off">>,[],46,false,false,0, {1408,452751,649943}, completed, {<0.1220.1>,#Ref<0.0.1.134456>}, <<"replication_ns_1@10.242.238.90">>,<0.1216.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:31.651,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1220.1>,{#Ref<0.0.1.134445>,<0.1225.1>}} [error_logger:info,2014-08-19T16:52:31.651,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1225.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcde", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcde"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:31.656,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.664,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.664,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7818 us [ns_server:debug,2014-08-19T16:52:31.665,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.666,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{101, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:31.675,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcde"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:31.675,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1227.1> [ns_server:debug,2014-08-19T16:52:31.701,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.704,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.705,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3371 us [ns_server:debug,2014-08-19T16:52:31.705,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.705,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{250, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, 
{num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:31.740,ns_1@10.242.238.90:<0.32507.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:31.740,ns_1@10.242.238.90:<0.32518.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:31.740,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32507.0> (ok) [ns_server:debug,2014-08-19T16:52:31.741,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32518.0> (ok) [rebalance:debug,2014-08-19T16:52:31.742,ns_1@10.242.238.90:<0.32510.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:31.742,ns_1@10.242.238.90:<0.32510.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.742,ns_1@10.242.238.90:<0.1229.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.742,ns_1@10.242.238.90:<0.1229.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.743,ns_1@10.242.238.90:<0.32510.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:31.743,ns_1@10.242.238.90:<0.32505.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:31.743,ns_1@10.242.238.90:<0.32505.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.743,ns_1@10.242.238.90:<0.1230.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.743,ns_1@10.242.238.90:<0.1230.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.743,ns_1@10.242.238.90:<0.32505.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
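Before any of these replicators dies or hands off its sockets, it appears to run a small barrier: a helper process sends an opaque marker over the downstream connection ("Sending opaque message to confirm downstream reception") while the terminating process waits for the matching ack ("Going to wait for reception of opaque message ack" ... "Got close ack!"), so that everything written earlier is known to have reached the other side. A sketch of that barrier, collapsed into a single caller and with a fake downstream that just echoes the marker; all names here are invented.

    -module(opaque_ack_sketch).
    -export([demo/0]).

    %% Fake downstream that acks any marker; in the real system the ack comes
    %% back from memcached over the downstream socket.
    downstream() ->
        receive
            {marker, From, Opaque} -> From ! {ack, Opaque}
        end.

    %% Barrier: once the ack for our marker is back, nothing sent before the
    %% marker can still be in flight.
    confirm_sent_messages(Downstream) ->
        Opaque = make_ref(),
        Downstream ! {marker, self(), Opaque},
        receive
            {ack, Opaque} -> ok
        after 30000 ->
            {error, timeout}
        end.

    demo() ->
        Downstream = spawn(fun downstream/0),
        ok = confirm_sent_messages(Downstream),
        io:format("got close ack, safe to shut down~n").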
[ns_server:debug,2014-08-19T16:52:31.749,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.752,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.752,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3097 us [ns_server:debug,2014-08-19T16:52:31.753,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.753,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{249, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:31.754,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 103 state to replica [ns_server:info,2014-08-19T16:52:31.754,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdeg" ("g", []) [ns_server:debug,2014-08-19T16:52:31.755,ns_1@10.242.238.90:<0.1232.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdeg", 'ns_1@10.242.238.88'}, #Ref<0.0.1.134723>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdeg"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:31.756,ns_1@10.242.238.90:<0.1232.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1225.1> [ns_server:info,2014-08-19T16:52:31.756,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:31.776,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {103,1}] [ns_server:info,2014-08-19T16:52:31.777,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:31.777,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:31.777,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:52:31.777,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:31.778,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:31.778,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.778,ns_1@10.242.238.90:<0.1234.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.778,ns_1@10.242.238.90:<0.1234.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.778,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.778,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:31.778,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:31.778,ns_1@10.242.238.90:<0.1225.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:31.778,ns_1@10.242.238.90:<0.1232.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1225.1> [ns_server:debug,2014-08-19T16:52:31.779,ns_1@10.242.238.90:<0.1232.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:31.779,ns_1@10.242.238.90:<0.1236.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:31.779,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1225.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1227.1>, <<"cut off">>,<<"cut off">>,[],49,false,false,0, {1408,452751,777684}, completed, {<0.1232.1>,#Ref<0.0.1.134736>}, <<"replication_ns_1@10.242.238.90">>,<0.1225.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:31.779,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1232.1>,{#Ref<0.0.1.134725>,<0.1236.1>}} [error_logger:info,2014-08-19T16:52:31.779,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1236.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdeg", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdeg"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:31.785,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.788,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.788,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3229 us [ns_server:debug,2014-08-19T16:52:31.789,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{103, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:31.789,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:52:31.790,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 102 state to replica [ns_server:info,2014-08-19T16:52:31.790,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefg" ("f", []) [ns_server:debug,2014-08-19T16:52:31.791,ns_1@10.242.238.90:<0.1238.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefg", 'ns_1@10.242.238.88'}, #Ref<0.0.1.134851>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefg"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:31.791,ns_1@10.242.238.90:<0.1238.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1236.1> [ns_server:debug,2014-08-19T16:52:31.802,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdeg"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] 
[rebalance:debug,2014-08-19T16:52:31.802,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1240.1> [ns_server:info,2014-08-19T16:52:31.802,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:31.822,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}] [ns_server:info,2014-08-19T16:52:31.823,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:31.824,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:31.824,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:31.824,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:31.824,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:31.824,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.824,ns_1@10.242.238.90:<0.1241.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.824,ns_1@10.242.238.90:<0.1241.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.824,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.824,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:31.824,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:31.825,ns_1@10.242.238.90:<0.1236.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:31.825,ns_1@10.242.238.90:<0.1238.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1236.1> [ns_server:debug,2014-08-19T16:52:31.825,ns_1@10.242.238.90:<0.1238.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:31.825,ns_1@10.242.238.90:<0.1243.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:31.825,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1236.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1240.1>, <<"cut off">>,<<"cut off">>,[],52,false,false,0, {1408,452751,824092}, completed, {<0.1238.1>,#Ref<0.0.1.134864>}, <<"replication_ns_1@10.242.238.90">>,<0.1236.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:31.825,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1238.1>,{#Ref<0.0.1.134853>,<0.1243.1>}} [error_logger:info,2014-08-19T16:52:31.825,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1243.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefg", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefg"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:31.830,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.833,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.834,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3305 us [ns_server:debug,2014-08-19T16:52:31.834,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.834,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{102, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:31.836,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 104 state to replica [ns_server:info,2014-08-19T16:52:31.837,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefgh" ("h", []) [ns_server:debug,2014-08-19T16:52:31.838,ns_1@10.242.238.90:<0.1245.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefgh", 'ns_1@10.242.238.88'}, #Ref<0.0.1.135007>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefgh"}, {set_to_pending_state,false}, {takeover,false}, 
{suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:31.838,ns_1@10.242.238.90:<0.1245.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1243.1> [ns_server:debug,2014-08-19T16:52:31.846,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefg"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:31.847,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1247.1> [ns_server:info,2014-08-19T16:52:31.847,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:52:31.867,ns_1@10.242.238.90:<0.32488.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:31.867,ns_1@10.242.238.90:<0.32483.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:31.867,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}] [ns_server:info,2014-08-19T16:52:31.868,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:31.868,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:31.868,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:31.869,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:31.869,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:31.869,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.869,ns_1@10.242.238.90:<0.1248.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.869,ns_1@10.242.238.90:<0.1248.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.869,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.869,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:31.869,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:31.869,ns_1@10.242.238.90:<0.1243.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:52:31.869,ns_1@10.242.238.90:<0.1245.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1243.1> [ns_server:debug,2014-08-19T16:52:31.870,ns_1@10.242.238.90:<0.1245.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:31.870,ns_1@10.242.238.90:<0.1250.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:31.870,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1243.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1247.1>, <<"cut off">>,<<"cut off">>,[],55,false,false,0, {1408,452751,868761}, completed, {<0.1245.1>,#Ref<0.0.1.135020>}, <<"replication_ns_1@10.242.238.90">>,<0.1243.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:31.870,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1245.1>,{#Ref<0.0.1.135009>,<0.1250.1>}} [ns_server:debug,2014-08-19T16:52:31.870,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32488.0> (ok) [error_logger:info,2014-08-19T16:52:31.870,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1250.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefgh", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefgh"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:31.870,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32483.0> (ok) [rebalance:debug,2014-08-19T16:52:31.872,ns_1@10.242.238.90:<0.32486.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:31.872,ns_1@10.242.238.90:<0.32486.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.872,ns_1@10.242.238.90:<0.1251.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.872,ns_1@10.242.238.90:<0.1251.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.873,ns_1@10.242.238.90:<0.32486.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:52:31.873,ns_1@10.242.238.90:<0.32481.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:31.873,ns_1@10.242.238.90:<0.32481.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.873,ns_1@10.242.238.90:<0.1252.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.873,ns_1@10.242.238.90:<0.1252.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.873,ns_1@10.242.238.90:<0.32481.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.876,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.879,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.879,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2896 us [ns_server:debug,2014-08-19T16:52:31.880,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{104, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:31.880,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:52:31.881,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 105 state to replica [ns_server:info,2014-08-19T16:52:31.881,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghi" ("i", []) [ns_server:debug,2014-08-19T16:52:31.882,ns_1@10.242.238.90:<0.1254.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefghi", 'ns_1@10.242.238.88'}, #Ref<0.0.1.135196>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghi"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:31.882,ns_1@10.242.238.90:<0.1254.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1250.1> [ns_server:debug,2014-08-19T16:52:31.896,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefgh"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:31.896,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1256.1> 
[ns_server:info,2014-08-19T16:52:31.897,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:31.917,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}] [ns_server:info,2014-08-19T16:52:31.918,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:31.918,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:31.918,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:31.918,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:31.918,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:31.919,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:31.919,ns_1@10.242.238.90:<0.1257.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:31.919,ns_1@10.242.238.90:<0.1257.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:31.919,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:31.919,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:31.919,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:31.919,ns_1@10.242.238.90:<0.1250.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:31.919,ns_1@10.242.238.90:<0.1254.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1250.1> [ns_server:debug,2014-08-19T16:52:31.920,ns_1@10.242.238.90:<0.1254.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:31.920,ns_1@10.242.238.90:<0.1259.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:31.920,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1250.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1256.1>, <<"cut off">>,<<"cut off">>,[],58,false,false,0, {1408,452751,918586}, completed, {<0.1254.1>,#Ref<0.0.1.135209>}, <<"replication_ns_1@10.242.238.90">>,<0.1250.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:31.920,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1254.1>,{#Ref<0.0.1.135198>,<0.1259.1>}} [error_logger:info,2014-08-19T16:52:31.920,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1259.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghi", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghi"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:31.925,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.930,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.930,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4994 us [ns_server:debug,2014-08-19T16:52:31.931,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.931,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{105, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:31.942,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghi"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:31.942,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1261.1> [ns_server:debug,2014-08-19T16:52:31.971,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:31.974,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.974,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3224 us [ns_server:debug,2014-08-19T16:52:31.975,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:31.975,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{252, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, 
{sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:31.992,ns_1@10.242.238.90:<0.32478.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:31.992,ns_1@10.242.238.90:<0.32473.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:32.008,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32478.0> (ok) [ns_server:debug,2014-08-19T16:52:32.008,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32473.0> (ok) [rebalance:debug,2014-08-19T16:52:32.009,ns_1@10.242.238.90:<0.32476.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.010,ns_1@10.242.238.90:<0.32476.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.010,ns_1@10.242.238.90:<0.1263.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.010,ns_1@10.242.238.90:<0.1263.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:32.010,ns_1@10.242.238.90:<0.32471.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:52:32.010,ns_1@10.242.238.90:<0.32476.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.010,ns_1@10.242.238.90:<0.32471.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.010,ns_1@10.242.238.90:<0.1264.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.011,ns_1@10.242.238.90:<0.1264.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.011,ns_1@10.242.238.90:<0.32471.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
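The `Changed vbucket N state to replica` entries emitted by ns_memcached (vbuckets 103, 102, 104, 105 so far in this stretch) are the simplest way to follow this node's progress through the rebalance. A small, hypothetical log-scanning helper, assuming only the message format shown in these entries (it is not a Couchbase tool):

    import re
    import sys

    # Collect "Changed vbucket <id> state to <state>" events from an
    # ns_server debug log piped in on stdin, e.g.:
    #   python3 vbucket_states.py < ns_server.debug.log
    STATE_RE = re.compile(r"Changed vbucket (\d+) state to (\w+)")

    states = {}
    for line in sys.stdin:
        for vb, state in STATE_RE.findall(line):
            states[int(vb)] = state          # last reported state wins

    replicas = sorted(vb for vb, st in states.items() if st == "replica")
    print("replica vbuckets on this node:", replicas)
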
[ns_server:debug,2014-08-19T16:52:32.024,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.027,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.027,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2958 us [ns_server:debug,2014-08-19T16:52:32.028,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.028,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{251, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:32.073,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.075,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.075,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1714 us [ns_server:debug,2014-08-19T16:52:32.075,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.076,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{253, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:32.103,ns_1@10.242.238.90:<0.32468.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:32.103,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32468.0> (ok) [rebalance:debug,2014-08-19T16:52:32.103,ns_1@10.242.238.90:<0.32463.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:32.103,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32463.0> (ok) [rebalance:debug,2014-08-19T16:52:32.104,ns_1@10.242.238.90:<0.32466.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.104,ns_1@10.242.238.90:<0.32466.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack 
[ns_server:debug,2014-08-19T16:52:32.105,ns_1@10.242.238.90:<0.1267.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.105,ns_1@10.242.238.90:<0.1267.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:32.105,ns_1@10.242.238.90:<0.32461.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:52:32.105,ns_1@10.242.238.90:<0.32466.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.105,ns_1@10.242.238.90:<0.32461.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.105,ns_1@10.242.238.90:<0.1268.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.105,ns_1@10.242.238.90:<0.1268.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.105,ns_1@10.242.238.90:<0.32461.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.130,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.134,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3542 us [ns_server:debug,2014-08-19T16:52:32.134,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.135,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.135,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{254, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.135,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 106 state to replica [ns_server:info,2014-08-19T16:52:32.136,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghij" ("j", []) [ns_server:debug,2014-08-19T16:52:32.137,ns_1@10.242.238.90:<0.1270.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefghij", 'ns_1@10.242.238.88'}, #Ref<0.0.1.135599>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghij"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] 
[ns_server:debug,2014-08-19T16:52:32.137,ns_1@10.242.238.90:<0.1270.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1259.1> [ns_server:info,2014-08-19T16:52:32.137,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:32.158,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}] [ns_server:info,2014-08-19T16:52:32.159,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.160,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.160,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.160,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.160,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.160,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.160,ns_1@10.242.238.90:<0.1272.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.160,ns_1@10.242.238.90:<0.1272.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.161,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.161,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.161,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.161,ns_1@10.242.238.90:<0.1259.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.161,ns_1@10.242.238.90:<0.1270.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1259.1> [ns_server:debug,2014-08-19T16:52:32.161,ns_1@10.242.238.90:<0.1270.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.162,ns_1@10.242.238.90:<0.1274.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.162,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1259.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1261.1>, <<"cut off">>,<<"cut off">>,[],61,false,false,0, {1408,452752,160250}, completed, {<0.1270.1>,#Ref<0.0.1.135612>}, <<"replication_ns_1@10.242.238.90">>,<0.1259.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:32.162,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1270.1>,{#Ref<0.0.1.135601>,<0.1274.1>}} [error_logger:info,2014-08-19T16:52:32.162,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1274.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghij", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghij"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.167,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.170,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.170,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3016 us [ns_server:debug,2014-08-19T16:52:32.170,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.171,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{106, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.171,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 107 state to replica [ns_server:info,2014-08-19T16:52:32.172,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijk" ("k", []) [ns_server:debug,2014-08-19T16:52:32.172,ns_1@10.242.238.90:<0.1276.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefghijk", 'ns_1@10.242.238.88'}, #Ref<0.0.1.135726>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijk"}, {set_to_pending_state,false}, 
{takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.173,ns_1@10.242.238.90:<0.1276.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1274.1> [ns_server:debug,2014-08-19T16:52:32.183,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghij"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:32.183,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1278.1> [ns_server:info,2014-08-19T16:52:32.183,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:32.203,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}] [ns_server:info,2014-08-19T16:52:32.204,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.204,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.204,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.204,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.204,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.205,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.205,ns_1@10.242.238.90:<0.1279.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.205,ns_1@10.242.238.90:<0.1279.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.205,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.205,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.205,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.205,ns_1@10.242.238.90:<0.1274.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:52:32.205,ns_1@10.242.238.90:<0.1276.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1274.1> [ns_server:debug,2014-08-19T16:52:32.206,ns_1@10.242.238.90:<0.1276.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.206,ns_1@10.242.238.90:<0.1281.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.206,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1274.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1278.1>, <<"cut off">>,<<"cut off">>,[],64,false,false,0, {1408,452752,204608}, completed, {<0.1276.1>,#Ref<0.0.1.135739>}, <<"replication_ns_1@10.242.238.90">>,<0.1274.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:32.206,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1276.1>,{#Ref<0.0.1.135728>,<0.1281.1>}} [error_logger:info,2014-08-19T16:52:32.206,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1281.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghijk", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijk"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.211,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:52:32.211,ns_1@10.242.238.90:<0.32452.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:32.212,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32452.0> (ok) [rebalance:debug,2014-08-19T16:52:32.212,ns_1@10.242.238.90:<0.32439.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:32.212,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32439.0> (ok) [rebalance:debug,2014-08-19T16:52:32.213,ns_1@10.242.238.90:<0.32442.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.213,ns_1@10.242.238.90:<0.32442.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.213,ns_1@10.242.238.90:<0.1282.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.213,ns_1@10.242.238.90:<0.1282.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:32.213,ns_1@10.242.238.90:<0.32437.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:52:32.214,ns_1@10.242.238.90:<0.32442.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:32.214,ns_1@10.242.238.90:<0.32437.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.214,ns_1@10.242.238.90:<0.1283.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.214,ns_1@10.242.238.90:<0.1283.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.214,ns_1@10.242.238.90:<0.32437.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.215,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.215,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4190 us [ns_server:debug,2014-08-19T16:52:32.215,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.216,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{107, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.221,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 109 state to replica [ns_server:info,2014-08-19T16:52:32.221,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijkm" ("m", []) [ns_server:debug,2014-08-19T16:52:32.222,ns_1@10.242.238.90:<0.1285.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefghijkm", 'ns_1@10.242.238.88'}, #Ref<0.0.1.135934>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijkm"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.222,ns_1@10.242.238.90:<0.1285.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1281.1> [ns_server:debug,2014-08-19T16:52:32.227,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijk"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:32.228,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1287.1> [ns_server:info,2014-08-19T16:52:32.228,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:32.248,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream 
`replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {109,1}] [ns_server:info,2014-08-19T16:52:32.249,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.249,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.249,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.249,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.249,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.249,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.250,ns_1@10.242.238.90:<0.1288.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.250,ns_1@10.242.238.90:<0.1288.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.250,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.250,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.250,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.250,ns_1@10.242.238.90:<0.1281.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.250,ns_1@10.242.238.90:<0.1285.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1281.1> [ns_server:debug,2014-08-19T16:52:32.250,ns_1@10.242.238.90:<0.1285.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.251,ns_1@10.242.238.90:<0.1290.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.251,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1281.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1287.1>, <<"cut off">>,<<"cut off">>,[],67,false,false,0, {1408,452752,249483}, completed, {<0.1285.1>,#Ref<0.0.1.135947>}, <<"replication_ns_1@10.242.238.90">>,<0.1281.1>, {had_backfill,false,undefined,[]}, completed,false}. 
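The recurring `Fully synchronized config in N us` entries (3097 us, 3229 us, 4190 us, and so on above) give a quick read on config-replication latency while 'ns_1@10.242.238.88' keeps issuing full synchronization requests. A throwaway sketch for pulling those timings out, again assuming only the message text shown in this log:

    import re
    import sys

    # Extract "Fully synchronized config in <N> us" timings and summarize them.
    SYNC_RE = re.compile(r"Fully synchronized config in (\d+) us")

    timings = [int(us) for line in sys.stdin for us in SYNC_RE.findall(line)]
    if timings:
        print("syncs:", len(timings),
              "min/avg/max us:", min(timings),
              sum(timings) // len(timings), max(timings))
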
[ns_server:debug,2014-08-19T16:52:32.251,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1285.1>,{#Ref<0.0.1.135936>,<0.1290.1>}} [error_logger:info,2014-08-19T16:52:32.251,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1290.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghijkm", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijkm"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.256,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.259,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.260,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3225 us [ns_server:debug,2014-08-19T16:52:32.260,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.260,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{109, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.261,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 108 state to replica [ns_server:info,2014-08-19T16:52:32.262,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklm" ("l", []) [ns_server:debug,2014-08-19T16:52:32.262,ns_1@10.242.238.90:<0.1292.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefghijklm", 'ns_1@10.242.238.88'}, #Ref<0.0.1.136068>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklm"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.263,ns_1@10.242.238.90:<0.1292.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1290.1> [ns_server:debug,2014-08-19T16:52:32.272,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijkm"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] 
[rebalance:debug,2014-08-19T16:52:32.273,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1294.1> [ns_server:info,2014-08-19T16:52:32.273,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:32.293,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}] [ns_server:info,2014-08-19T16:52:32.294,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.294,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.294,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.294,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.294,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.295,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.295,ns_1@10.242.238.90:<0.1295.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.295,ns_1@10.242.238.90:<0.1295.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.295,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.295,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.295,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.295,ns_1@10.242.238.90:<0.1290.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.295,ns_1@10.242.238.90:<0.1292.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1290.1> [ns_server:debug,2014-08-19T16:52:32.296,ns_1@10.242.238.90:<0.1292.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.296,ns_1@10.242.238.90:<0.1297.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.296,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1290.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1294.1>, <<"cut off">>,<<"cut off">>,[],70,false,false,0, {1408,452752,294549}, completed, {<0.1292.1>,#Ref<0.0.1.136081>}, <<"replication_ns_1@10.242.238.90">>,<0.1290.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:32.296,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1292.1>,{#Ref<0.0.1.136070>,<0.1297.1>}} [error_logger:info,2014-08-19T16:52:32.296,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1297.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghijklm", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklm"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.301,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.305,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3216 us [ns_server:debug,2014-08-19T16:52:32.305,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.305,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.306,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{108, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:32.318,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklm"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:32.319,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1299.1> [rebalance:debug,2014-08-19T16:52:32.320,ns_1@10.242.238.90:<0.32434.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:32.320,ns_1@10.242.238.90:<0.32429.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:32.320,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32434.0> (ok) 
[ns_server:debug,2014-08-19T16:52:32.320,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32429.0> (ok) [rebalance:debug,2014-08-19T16:52:32.323,ns_1@10.242.238.90:<0.32427.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:32.323,ns_1@10.242.238.90:<0.32432.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.323,ns_1@10.242.238.90:<0.32427.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.323,ns_1@10.242.238.90:<0.32432.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.323,ns_1@10.242.238.90:<0.1300.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.323,ns_1@10.242.238.90:<0.1301.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.323,ns_1@10.242.238.90:<0.1300.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:32.323,ns_1@10.242.238.90:<0.1301.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.323,ns_1@10.242.238.90:<0.32427.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:52:32.323,ns_1@10.242.238.90:<0.32432.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.346,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.350,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.350,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3352 us [ns_server:debug,2014-08-19T16:52:32.351,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.352,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{255, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:32.404,ns_1@10.242.238.90:<0.32419.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:32.404,ns_1@10.242.238.90:<0.32424.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:32.404,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32419.0> (ok) [ns_server:debug,2014-08-19T16:52:32.404,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: 
<0.32424.0> (ok) [rebalance:debug,2014-08-19T16:52:32.406,ns_1@10.242.238.90:<0.32403.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.406,ns_1@10.242.238.90:<0.32403.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.406,ns_1@10.242.238.90:<0.1303.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.406,ns_1@10.242.238.90:<0.1303.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.407,ns_1@10.242.238.90:<0.32403.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:32.407,ns_1@10.242.238.90:<0.32422.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.407,ns_1@10.242.238.90:<0.32422.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.407,ns_1@10.242.238.90:<0.1304.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.408,ns_1@10.242.238.90:<0.1304.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.408,ns_1@10.242.238.90:<0.32422.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.410,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.413,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.414,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.414,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4049 us [ns_server:debug,2014-08-19T16:52:32.414,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{172, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.415,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 110 state to replica [ns_server:info,2014-08-19T16:52:32.416,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmn" ("n", []) [ns_server:debug,2014-08-19T16:52:32.417,ns_1@10.242.238.90:<0.1306.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefghijklmn", 'ns_1@10.242.238.88'}, #Ref<0.0.1.136398>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, 
[{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmn"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.417,ns_1@10.242.238.90:<0.1306.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1297.1> [ns_server:info,2014-08-19T16:52:32.417,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:32.438,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}] [ns_server:info,2014-08-19T16:52:32.438,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.439,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.439,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.439,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.439,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.440,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.440,ns_1@10.242.238.90:<0.1308.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.440,ns_1@10.242.238.90:<0.1308.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.440,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.440,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.440,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.440,ns_1@10.242.238.90:<0.1297.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.440,ns_1@10.242.238.90:<0.1306.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1297.1> [ns_server:debug,2014-08-19T16:52:32.441,ns_1@10.242.238.90:<0.1306.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.441,ns_1@10.242.238.90:<0.1310.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.441,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1297.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1299.1>, <<"cut off">>,<<"cut off">>,[],73,false,false,0, {1408,452752,439618}, completed, {<0.1306.1>,#Ref<0.0.1.136411>}, <<"replication_ns_1@10.242.238.90">>,<0.1297.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:32.441,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1306.1>,{#Ref<0.0.1.136400>,<0.1310.1>}} [error_logger:info,2014-08-19T16:52:32.441,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1310.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghijklmn", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmn"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.445,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.448,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.449,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3220 us [ns_server:debug,2014-08-19T16:52:32.449,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.449,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{110, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.450,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 111 state to replica [ns_server:info,2014-08-19T16:52:32.450,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmno" ("o", []) [ns_server:debug,2014-08-19T16:52:32.451,ns_1@10.242.238.90:<0.1312.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefghijklmno", 'ns_1@10.242.238.88'}, #Ref<0.0.1.136525>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmno"}, 
{set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.452,ns_1@10.242.238.90:<0.1312.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1310.1> [ns_server:debug,2014-08-19T16:52:32.462,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmn"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:32.462,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1314.1> [ns_server:info,2014-08-19T16:52:32.463,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:32.482,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}] [ns_server:info,2014-08-19T16:52:32.483,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.483,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.484,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.484,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.484,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.484,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.484,ns_1@10.242.238.90:<0.1315.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.484,ns_1@10.242.238.90:<0.1315.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.484,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.484,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.485,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.485,ns_1@10.242.238.90:<0.1310.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:52:32.485,ns_1@10.242.238.90:<0.1312.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1310.1> [ns_server:debug,2014-08-19T16:52:32.485,ns_1@10.242.238.90:<0.1312.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.485,ns_1@10.242.238.90:<0.1317.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.485,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1310.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1314.1>, <<"cut off">>,<<"cut off">>,[],76,false,false,0, {1408,452752,483942}, completed, {<0.1312.1>,#Ref<0.0.1.136538>}, <<"replication_ns_1@10.242.238.90">>,<0.1310.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:32.486,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1312.1>,{#Ref<0.0.1.136527>,<0.1317.1>}} [error_logger:info,2014-08-19T16:52:32.486,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1317.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghijklmno", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmno"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.490,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.495,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.495,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4798 us [ns_server:debug,2014-08-19T16:52:32.496,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.497,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{111, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.500,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 113 state to replica [rebalance:debug,2014-08-19T16:52:32.504,ns_1@10.242.238.90:<0.32400.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:32.505,ns_1@10.242.238.90:<0.32395.0>:janitor_agent:handle_call:795]Done 
[ns_server:debug,2014-08-19T16:52:32.507,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmno"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:32.528,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1319.1> [ns_server:info,2014-08-19T16:52:32.528,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnoq" ("q", []) [ns_server:debug,2014-08-19T16:52:32.529,ns_1@10.242.238.90:<0.1320.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefghijklmnoq", 'ns_1@10.242.238.88'}, #Ref<0.0.1.136709>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnoq"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.529,ns_1@10.242.238.90:<0.1320.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1317.1> [ns_server:info,2014-08-19T16:52:32.529,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:32.549,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {113,1}] [ns_server:info,2014-08-19T16:52:32.550,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.551,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.551,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.551,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.551,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.551,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.551,ns_1@10.242.238.90:<0.1322.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.551,ns_1@10.242.238.90:<0.1322.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.551,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:32.552,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.552,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.552,ns_1@10.242.238.90:<0.1317.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.552,ns_1@10.242.238.90:<0.1320.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1317.1> [ns_server:debug,2014-08-19T16:52:32.552,ns_1@10.242.238.90:<0.1320.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.552,ns_1@10.242.238.90:<0.1324.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.553,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1317.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1319.1>, <<"cut off">>,<<"cut off">>,[],79,false,false,0, {1408,452752,551119}, completed, {<0.1320.1>,#Ref<0.0.1.136722>}, <<"replication_ns_1@10.242.238.90">>,<0.1317.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:32.553,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1320.1>,{#Ref<0.0.1.136711>,<0.1324.1>}} [error_logger:info,2014-08-19T16:52:32.553,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1324.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghijklmnoq", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnoq"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.553,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32400.0> (ok) [ns_server:debug,2014-08-19T16:52:32.553,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32395.0> (ok) [rebalance:debug,2014-08-19T16:52:32.555,ns_1@10.242.238.90:<0.32393.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.556,ns_1@10.242.238.90:<0.32393.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.556,ns_1@10.242.238.90:<0.1325.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.556,ns_1@10.242.238.90:<0.1325.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.556,ns_1@10.242.238.90:<0.32393.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:52:32.557,ns_1@10.242.238.90:<0.32398.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.557,ns_1@10.242.238.90:<0.32398.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.557,ns_1@10.242.238.90:<0.1326.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.557,ns_1@10.242.238.90:<0.1326.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.558,ns_1@10.242.238.90:<0.32398.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.559,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.562,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.563,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3578 us [ns_server:debug,2014-08-19T16:52:32.563,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.563,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{113, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.564,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 112 state to replica [ns_server:info,2014-08-19T16:52:32.565,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnopq" ("p", []) [ns_server:debug,2014-08-19T16:52:32.566,ns_1@10.242.238.90:<0.1328.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefghijklmnopq", 'ns_1@10.242.238.88'}, #Ref<0.0.1.136885>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopq"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.566,ns_1@10.242.238.90:<0.1328.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1324.1> [ns_server:debug,2014-08-19T16:52:32.576,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnoq"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:32.576,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1330.1> 
[ns_server:info,2014-08-19T16:52:32.577,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:52:32.589,ns_1@10.242.238.90:<0.32385.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:32.589,ns_1@10.242.238.90:<0.32390.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:32.596,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}] [ns_server:info,2014-08-19T16:52:32.597,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.598,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.598,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.598,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.598,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.598,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.598,ns_1@10.242.238.90:<0.1331.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.598,ns_1@10.242.238.90:<0.1331.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.598,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.598,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.598,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.599,ns_1@10.242.238.90:<0.1324.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.599,ns_1@10.242.238.90:<0.1328.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1324.1> [ns_server:debug,2014-08-19T16:52:32.599,ns_1@10.242.238.90:<0.1328.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.599,ns_1@10.242.238.90:<0.1333.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.599,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1324.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1330.1>, <<"cut off">>,<<"cut off">>,[],82,false,false,0, {1408,452752,598017}, completed, {<0.1328.1>,#Ref<0.0.1.136898>}, <<"replication_ns_1@10.242.238.90">>,<0.1324.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:32.600,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1328.1>,{#Ref<0.0.1.136887>,<0.1333.1>}} [ns_server:debug,2014-08-19T16:52:32.600,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32385.0> (ok) [error_logger:info,2014-08-19T16:52:32.600,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1333.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghijklmnopq", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopq"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.600,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32390.0> (ok) [rebalance:debug,2014-08-19T16:52:32.601,ns_1@10.242.238.90:<0.32383.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.601,ns_1@10.242.238.90:<0.32383.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.601,ns_1@10.242.238.90:<0.1334.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.602,ns_1@10.242.238.90:<0.1334.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.602,ns_1@10.242.238.90:<0.32383.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:32.603,ns_1@10.242.238.90:<0.32388.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.603,ns_1@10.242.238.90:<0.32388.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.603,ns_1@10.242.238.90:<0.1335.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.603,ns_1@10.242.238.90:<0.1335.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.603,ns_1@10.242.238.90:<0.32388.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:32.605,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.608,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.608,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3388 us [ns_server:debug,2014-08-19T16:52:32.608,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.609,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{112, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.610,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 114 state to replica [ns_server:info,2014-08-19T16:52:32.610,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnopqr" ("r", []) [ns_server:debug,2014-08-19T16:52:32.611,ns_1@10.242.238.90:<0.1337.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefghijklmnopqr", 'ns_1@10.242.238.88'}, #Ref<0.0.1.137073>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqr"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.611,ns_1@10.242.238.90:<0.1337.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1333.1> [ns_server:debug,2014-08-19T16:52:32.622,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopq"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:32.622,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1339.1> [ns_server:info,2014-08-19T16:52:32.623,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:32.643,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}] [ns_server:info,2014-08-19T16:52:32.644,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream 
`replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.644,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.645,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.645,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.645,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.645,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.645,ns_1@10.242.238.90:<0.1340.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.645,ns_1@10.242.238.90:<0.1340.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.645,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.645,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.645,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.645,ns_1@10.242.238.90:<0.1333.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.646,ns_1@10.242.238.90:<0.1337.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1333.1> [ns_server:debug,2014-08-19T16:52:32.646,ns_1@10.242.238.90:<0.1337.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.646,ns_1@10.242.238.90:<0.1342.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.646,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1333.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1339.1>, <<"cut off">>,<<"cut off">>,[],85,false,false,0, {1408,452752,644823}, completed, {<0.1337.1>,#Ref<0.0.1.137086>}, <<"replication_ns_1@10.242.238.90">>,<0.1333.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:32.647,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1337.1>,{#Ref<0.0.1.137075>,<0.1342.1>}} [error_logger:info,2014-08-19T16:52:32.646,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1342.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghijklmnopqr", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqr"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.651,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.654,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.655,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3382 us [ns_server:debug,2014-08-19T16:52:32.655,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.655,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{114, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.657,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 115 state to replica [ns_server:info,2014-08-19T16:52:32.658,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnopqrs" ("s", []) [ns_server:debug,2014-08-19T16:52:32.658,ns_1@10.242.238.90:<0.1344.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefghijklmnopqrs", 'ns_1@10.242.238.88'}, #Ref<0.0.1.137227>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrs"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.659,ns_1@10.242.238.90:<0.1344.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1342.1> [ns_server:debug,2014-08-19T16:52:32.668,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqr"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] 
[rebalance:debug,2014-08-19T16:52:32.669,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1346.1> [ns_server:info,2014-08-19T16:52:32.669,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:52:32.689,ns_1@10.242.238.90:<0.32380.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:32.689,ns_1@10.242.238.90:<0.32361.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:32.689,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}] [ns_server:info,2014-08-19T16:52:32.690,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.690,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.690,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.690,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.691,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.691,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.691,ns_1@10.242.238.90:<0.1347.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.691,ns_1@10.242.238.90:<0.1347.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.691,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.691,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.691,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.691,ns_1@10.242.238.90:<0.1342.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.691,ns_1@10.242.238.90:<0.1344.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1342.1> [ns_server:debug,2014-08-19T16:52:32.692,ns_1@10.242.238.90:<0.1344.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.692,ns_1@10.242.238.90:<0.1349.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.692,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1342.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1346.1>, <<"cut off">>,<<"cut off">>,[],88,false,false,0, {1408,452752,690755}, completed, {<0.1344.1>,#Ref<0.0.1.137240>}, <<"replication_ns_1@10.242.238.90">>,<0.1342.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:32.692,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1344.1>,{#Ref<0.0.1.137229>,<0.1349.1>}} [error_logger:info,2014-08-19T16:52:32.692,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1349.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghijklmnopqrs", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrs"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.692,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32380.0> (ok) [ns_server:debug,2014-08-19T16:52:32.693,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32361.0> (ok) [rebalance:debug,2014-08-19T16:52:32.695,ns_1@10.242.238.90:<0.32364.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.695,ns_1@10.242.238.90:<0.32364.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.695,ns_1@10.242.238.90:<0.1350.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.695,ns_1@10.242.238.90:<0.1350.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.695,ns_1@10.242.238.90:<0.32364.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:32.696,ns_1@10.242.238.90:<0.32359.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.697,ns_1@10.242.238.90:<0.32359.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.697,ns_1@10.242.238.90:<0.1351.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.697,ns_1@10.242.238.90:<0.1351.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.697,ns_1@10.242.238.90:<0.32359.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
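Each cycle in this log performs the same "new-style vbucket filter change" handoff: a helper process registers under the new child id, links to the running ebucketmigrator, which changes the tap filter, silences its upstream sender, confirms the downstream with an opaque message, hands its state to the replacement process and exits, while the replacement reuses the old upstream connection. A compact restatement of that sequence as observed in these entries; this is a reader's reconstruction with invented names, not ns_server source:

-module(vb_filter_change_sketch).
-export([steps/0]).

%% One step per family of log messages seen in each cycle above.
steps() ->
    [register_new_child_id,        %% "Registered myself under id: ..."
     link_to_old_ebucketmigrator,  %% "Linked myself to old ebucketmigrator"
     change_tap_filter,            %% "Changing vbucket filter on tap stream"
     silence_upstream_sender,      %% "Silencing upstream sender"
     confirm_downstream,           %% "Sending opaque message ..." / "Got close ack!"
     hand_over_state,              %% "Passed old state to caller"
     old_process_exits,            %% "Sent out state. Preparing to die"
     reuse_old_upstream].          %% "Reusing old upstream"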
[ns_server:debug,2014-08-19T16:52:32.698,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.701,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.701,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3541 us [ns_server:debug,2014-08-19T16:52:32.702,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.702,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{115, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:32.717,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrs"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:32.718,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1353.1> [ns_server:info,2014-08-19T16:52:32.727,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 116 state to replica [ns_server:info,2014-08-19T16:52:32.728,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnopqrst" ("t", []) [ns_server:debug,2014-08-19T16:52:32.729,ns_1@10.242.238.90:<0.1354.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefghijklmnopqrst", 'ns_1@10.242.238.88'}, #Ref<0.0.1.137450>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrst"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.729,ns_1@10.242.238.90:<0.1354.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1349.1> [ns_server:info,2014-08-19T16:52:32.729,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:32.749,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}] [ns_server:info,2014-08-19T16:52:32.750,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on 
tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.750,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.750,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.751,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.751,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.751,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.751,ns_1@10.242.238.90:<0.1356.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.751,ns_1@10.242.238.90:<0.1356.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.751,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.751,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.751,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.751,ns_1@10.242.238.90:<0.1349.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.751,ns_1@10.242.238.90:<0.1354.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1349.1> [ns_server:debug,2014-08-19T16:52:32.752,ns_1@10.242.238.90:<0.1354.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.752,ns_1@10.242.238.90:<0.1358.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.752,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1349.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1353.1>, <<"cut off">>,<<"cut off">>,[],91,false,false,0, {1408,452752,750716}, completed, {<0.1354.1>,#Ref<0.0.1.137463>}, <<"replication_ns_1@10.242.238.90">>,<0.1349.1>, {had_backfill,false,undefined,[]}, completed,false}. 
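The "config change: buckets" entries in this log carry a per-vbucket map diff: each tuple appears to pair a vbucket id with its old and new server chain (active node first, then replica slots, with 'undefined' for an unfilled slot), so {115,['ns_1@10.242.238.88',undefined],['ns_1@10.242.238.88','ns_1@10.242.238.90']} records vbucket 115 gaining 'ns_1@10.242.238.90' as its replica. A small formatter for that reading; the tuple layout is an assumption drawn from these entries, and the module is not ns_server code:

-module(map_diff_sketch).
-export([describe/1]).

%% Assumed layout: {VBucket, OldChain, NewChain}, each chain being
%% [ActiveNode | ReplicaNodes], with 'undefined' for an empty replica slot.
describe({VBucket, OldChain, NewChain}) ->
    lists:flatten(io_lib:format("vbucket ~p: ~p -> ~p",
                                [VBucket, OldChain, NewChain])).

Usage: describe({115, ['ns_1@10.242.238.88', undefined], ['ns_1@10.242.238.88', 'ns_1@10.242.238.90']}) yields a string along the lines of "vbucket 115: ['ns_1@10.242.238.88',undefined] -> ['ns_1@10.242.238.88','ns_1@10.242.238.90']".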
[ns_server:debug,2014-08-19T16:52:32.752,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1354.1>,{#Ref<0.0.1.137452>,<0.1358.1>}} [error_logger:info,2014-08-19T16:52:32.752,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1358.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghijklmnopqrst", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrst"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.757,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.760,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.761,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3210 us [ns_server:debug,2014-08-19T16:52:32.761,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.761,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{116, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.762,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 117 state to replica [ns_server:info,2014-08-19T16:52:32.763,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnopqrstu" ("u", []) [ns_server:debug,2014-08-19T16:52:32.764,ns_1@10.242.238.90:<0.1361.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id,"VXYZ[\\]^_`abcdefghijklmnopqrstu", 'ns_1@10.242.238.88'}, #Ref<0.0.1.137591>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstu"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.764,ns_1@10.242.238.90:<0.1361.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1358.1> [ns_server:debug,2014-08-19T16:52:32.775,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrst"}, {name,<<"replication_ns_1@10.242.238.90">>}, 
{takeover,false}] [rebalance:debug,2014-08-19T16:52:32.775,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1363.1> [ns_server:info,2014-08-19T16:52:32.775,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:52:32.792,ns_1@10.242.238.90:<0.32356.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:32.793,ns_1@10.242.238.90:<0.32351.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:32.796,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}] [ns_server:info,2014-08-19T16:52:32.796,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.797,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.797,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.797,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.797,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.797,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.797,ns_1@10.242.238.90:<0.1364.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.797,ns_1@10.242.238.90:<0.1364.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.797,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.797,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.798,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.798,ns_1@10.242.238.90:<0.1358.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.798,ns_1@10.242.238.90:<0.1361.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1358.1> [ns_server:debug,2014-08-19T16:52:32.798,ns_1@10.242.238.90:<0.1361.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.798,ns_1@10.242.238.90:<0.1366.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.798,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1358.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1363.1>, <<"cut off">>,<<"cut off">>,[],94,false,false,0, {1408,452752,797196}, completed, {<0.1361.1>,#Ref<0.0.1.137604>}, <<"replication_ns_1@10.242.238.90">>,<0.1358.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:32.798,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1361.1>,{#Ref<0.0.1.137593>,<0.1366.1>}} [ns_server:debug,2014-08-19T16:52:32.799,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32356.0> (ok) [error_logger:info,2014-08-19T16:52:32.798,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1366.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghijklmnopqrstu", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstu"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.799,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32351.0> (ok) [rebalance:debug,2014-08-19T16:52:32.800,ns_1@10.242.238.90:<0.32354.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.800,ns_1@10.242.238.90:<0.32354.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.800,ns_1@10.242.238.90:<0.1367.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.800,ns_1@10.242.238.90:<0.1367.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:32.800,ns_1@10.242.238.90:<0.32349.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:52:32.801,ns_1@10.242.238.90:<0.32354.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.801,ns_1@10.242.238.90:<0.32349.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.801,ns_1@10.242.238.90:<0.1368.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.801,ns_1@10.242.238.90:<0.1368.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.801,ns_1@10.242.238.90:<0.32349.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:32.806,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.811,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.811,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4595 us [ns_server:debug,2014-08-19T16:52:32.811,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{117, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:32.812,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:52:32.815,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 118 state to replica [ns_server:info,2014-08-19T16:52:32.815,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnopqrstuv" ("v", []) [ns_server:debug,2014-08-19T16:52:32.818,ns_1@10.242.238.90:<0.1370.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuv", 'ns_1@10.242.238.88'}, #Ref<0.0.1.137787>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuv"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.819,ns_1@10.242.238.90:<0.1370.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1366.1> [ns_server:debug,2014-08-19T16:52:32.823,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstu"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:32.823,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1372.1> [ns_server:info,2014-08-19T16:52:32.823,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:32.843,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}] [ns_server:info,2014-08-19T16:52:32.844,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:handle_call:307]Successfully 
changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.845,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.845,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.845,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.845,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.845,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.845,ns_1@10.242.238.90:<0.1373.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.845,ns_1@10.242.238.90:<0.1373.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.845,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.845,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.846,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.846,ns_1@10.242.238.90:<0.1366.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.846,ns_1@10.242.238.90:<0.1370.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1366.1> [ns_server:debug,2014-08-19T16:52:32.846,ns_1@10.242.238.90:<0.1370.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.846,ns_1@10.242.238.90:<0.1375.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.846,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1366.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1372.1>, <<"cut off">>,<<"cut off">>,[],97,false,false,0, {1408,452752,845050}, completed, {<0.1370.1>,#Ref<0.0.1.137800>}, <<"replication_ns_1@10.242.238.90">>,<0.1366.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:32.847,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1370.1>,{#Ref<0.0.1.137789>,<0.1375.1>}} [error_logger:info,2014-08-19T16:52:32.847,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1375.1>}, {name, {new_child_id,"VXYZ[\\]^_`abcdefghijklmnopqrstuv", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, "VXYZ[\\]^_`abcdefghijklmnopqrstuv"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.852,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.855,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.855,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3378 us [ns_server:debug,2014-08-19T16:52:32.856,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.856,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{118, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.857,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 119 state to replica [ns_server:info,2014-08-19T16:52:32.857,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnopqrstuvw" ("w", []) [ns_server:debug,2014-08-19T16:52:32.858,ns_1@10.242.238.90:<0.1377.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvw", 'ns_1@10.242.238.88'}, #Ref<0.0.1.137933>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvw"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.858,ns_1@10.242.238.90:<0.1377.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1375.1> [ns_server:debug,2014-08-19T16:52:32.869,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuv"}, 
{name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:32.869,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1379.1> [ns_server:info,2014-08-19T16:52:32.869,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:52:32.885,ns_1@10.242.238.90:<0.32346.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:32.885,ns_1@10.242.238.90:<0.32341.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:32.890,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}] [ns_server:info,2014-08-19T16:52:32.891,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.891,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.891,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.891,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.892,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.892,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.892,ns_1@10.242.238.90:<0.1380.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.892,ns_1@10.242.238.90:<0.1380.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.892,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.892,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.892,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.892,ns_1@10.242.238.90:<0.1375.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.892,ns_1@10.242.238.90:<0.1377.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1375.1> [ns_server:debug,2014-08-19T16:52:32.893,ns_1@10.242.238.90:<0.1377.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.893,ns_1@10.242.238.90:<0.1382.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.893,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1375.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1379.1>, <<"cut off">>,<<"cut off">>,[],100,false,false,0, {1408,452752,891607}, completed, {<0.1377.1>,#Ref<0.0.1.137946>}, <<"replication_ns_1@10.242.238.90">>,<0.1375.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:32.893,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1377.1>,{#Ref<0.0.1.137935>,<0.1382.1>}} [ns_server:debug,2014-08-19T16:52:32.893,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32346.0> (ok) [error_logger:info,2014-08-19T16:52:32.893,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1382.1>}, {name, {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvw", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, "VXYZ[\\]^_`abcdefghijklmnopqrstuvw"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.893,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32341.0> (ok) [rebalance:debug,2014-08-19T16:52:32.896,ns_1@10.242.238.90:<0.32344.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.896,ns_1@10.242.238.90:<0.32344.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.896,ns_1@10.242.238.90:<0.1383.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.897,ns_1@10.242.238.90:<0.1383.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.897,ns_1@10.242.238.90:<0.32344.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:32.897,ns_1@10.242.238.90:<0.32325.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:32.897,ns_1@10.242.238.90:<0.32325.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.897,ns_1@10.242.238.90:<0.1384.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.898,ns_1@10.242.238.90:<0.1384.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.898,ns_1@10.242.238.90:<0.32325.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:32.900,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.901,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.902,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1443 us [ns_server:debug,2014-08-19T16:52:32.902,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.902,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{119, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.905,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 120 state to replica [ns_server:info,2014-08-19T16:52:32.905,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnopqrstuvwx" ("x", []) [ns_server:debug,2014-08-19T16:52:32.907,ns_1@10.242.238.90:<0.1386.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwx", 'ns_1@10.242.238.88'}, #Ref<0.0.1.138126>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvwx"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.907,ns_1@10.242.238.90:<0.1386.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1382.1> [ns_server:debug,2014-08-19T16:52:32.916,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvw"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:32.917,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1388.1> [ns_server:info,2014-08-19T16:52:32.917,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:32.937,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}] 
[ns_server:info,2014-08-19T16:52:32.938,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.938,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.939,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.939,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.939,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.939,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.939,ns_1@10.242.238.90:<0.1389.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.939,ns_1@10.242.238.90:<0.1389.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.939,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.939,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.939,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.939,ns_1@10.242.238.90:<0.1382.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.939,ns_1@10.242.238.90:<0.1386.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1382.1> [ns_server:debug,2014-08-19T16:52:32.940,ns_1@10.242.238.90:<0.1386.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.940,ns_1@10.242.238.90:<0.1391.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.940,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1382.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1388.1>, <<"cut off">>,<<"cut off">>,[],103,false,false,0, {1408,452752,938933}, completed, {<0.1386.1>,#Ref<0.0.1.138139>}, <<"replication_ns_1@10.242.238.90">>,<0.1382.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:32.940,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1386.1>,{#Ref<0.0.1.138128>,<0.1391.1>}} [error_logger:info,2014-08-19T16:52:32.940,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1391.1>}, {name, {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwx", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwx"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.946,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.949,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.949,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3238 us [ns_server:debug,2014-08-19T16:52:32.950,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.950,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{120, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:32.951,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 121 state to replica [ns_server:info,2014-08-19T16:52:32.951,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxy" ("y", []) [ns_server:debug,2014-08-19T16:52:32.952,ns_1@10.242.238.90:<0.1393.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxy", 'ns_1@10.242.238.88'}, #Ref<0.0.1.138275>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvwxy"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:32.952,ns_1@10.242.238.90:<0.1393.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1391.1> [ns_server:debug,2014-08-19T16:52:32.963,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvwx"}, 
{name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:32.964,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1395.1> [ns_server:info,2014-08-19T16:52:32.964,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:32.984,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}] [ns_server:info,2014-08-19T16:52:32.985,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:32.985,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:32.986,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:32.986,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:32.986,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:32.986,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:32.986,ns_1@10.242.238.90:<0.1396.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:32.986,ns_1@10.242.238.90:<0.1396.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:32.986,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:32.986,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:32.987,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:32.987,ns_1@10.242.238.90:<0.1391.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:32.987,ns_1@10.242.238.90:<0.1393.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1391.1> [ns_server:debug,2014-08-19T16:52:32.987,ns_1@10.242.238.90:<0.1393.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:32.987,ns_1@10.242.238.90:<0.1398.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:32.987,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1391.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1395.1>, <<"cut off">>,<<"cut off">>,[],106,false,false,0, {1408,452752,985867}, completed, {<0.1393.1>,#Ref<0.0.1.138288>}, <<"replication_ns_1@10.242.238.90">>,<0.1391.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:32.988,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1393.1>,{#Ref<0.0.1.138277>,<0.1398.1>}} [error_logger:info,2014-08-19T16:52:32.988,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1398.1>}, {name, {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxy", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxy"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:32.992,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:32.997,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4304 us [ns_server:debug,2014-08-19T16:52:32.997,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.998,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:32.999,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{121, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.001,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 123 state to replica [ns_server:info,2014-08-19T16:52:33.001,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxy{" ("{", []) [rebalance:debug,2014-08-19T16:52:33.002,ns_1@10.242.238.90:<0.32322.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:33.004,ns_1@10.242.238.90:<0.32317.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:33.004,ns_1@10.242.238.90:<0.1400.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxy{", 
'ns_1@10.242.238.88'}, #Ref<0.0.1.139364>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvwxy{"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.005,ns_1@10.242.238.90:<0.1400.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1398.1> [ns_server:debug,2014-08-19T16:52:33.014,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvwxy"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.014,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1402.1> [ns_server:info,2014-08-19T16:52:33.015,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:33.034,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {123,1}] [ns_server:info,2014-08-19T16:52:33.035,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.035,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.035,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.036,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.036,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.036,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.036,ns_1@10.242.238.90:<0.1403.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.036,ns_1@10.242.238.90:<0.1403.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.036,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.036,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.036,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.036,ns_1@10.242.238.90:<0.1398.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:52:33.036,ns_1@10.242.238.90:<0.1400.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1398.1> [ns_server:debug,2014-08-19T16:52:33.037,ns_1@10.242.238.90:<0.1400.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.037,ns_1@10.242.238.90:<0.1405.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.037,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1398.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1402.1>, <<"cut off">>,<<"cut off">>,[],109,false,false,0, {1408,452753,35830}, completed, {<0.1400.1>,#Ref<0.0.1.139380>}, <<"replication_ns_1@10.242.238.90">>,<0.1398.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:33.037,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1400.1>,{#Ref<0.0.1.139367>,<0.1405.1>}} [ns_server:debug,2014-08-19T16:52:33.037,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32322.0> (ok) [ns_server:debug,2014-08-19T16:52:33.038,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32317.0> (ok) [error_logger:info,2014-08-19T16:52:33.037,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1405.1>}, {name, {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxy{", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxy{"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:33.039,ns_1@10.242.238.90:<0.32320.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.039,ns_1@10.242.238.90:<0.32320.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.039,ns_1@10.242.238.90:<0.1406.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.039,ns_1@10.242.238.90:<0.1406.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.040,ns_1@10.242.238.90:<0.32320.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:52:33.040,ns_1@10.242.238.90:<0.32315.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.040,ns_1@10.242.238.90:<0.32315.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.041,ns_1@10.242.238.90:<0.1407.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.041,ns_1@10.242.238.90:<0.1407.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.041,ns_1@10.242.238.90:<0.32315.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.042,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.045,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.046,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3303 us [ns_server:debug,2014-08-19T16:52:33.046,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.047,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{123, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.047,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 122 state to replica [ns_server:info,2014-08-19T16:52:33.047,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{" ("z", []) [ns_server:debug,2014-08-19T16:52:33.048,ns_1@10.242.238.90:<0.1409.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{", 'ns_1@10.242.238.88'}, #Ref<0.0.1.139540>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.049,ns_1@10.242.238.90:<0.1409.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1405.1> [ns_server:debug,2014-08-19T16:52:33.063,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvwxy{"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] 
[rebalance:debug,2014-08-19T16:52:33.063,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1411.1> [ns_server:info,2014-08-19T16:52:33.063,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:33.085,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}] [ns_server:info,2014-08-19T16:52:33.086,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.086,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.086,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.086,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.086,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.086,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.086,ns_1@10.242.238.90:<0.1412.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.087,ns_1@10.242.238.90:<0.1412.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.087,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.087,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.087,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.087,ns_1@10.242.238.90:<0.1405.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.087,ns_1@10.242.238.90:<0.1409.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1405.1> [ns_server:debug,2014-08-19T16:52:33.087,ns_1@10.242.238.90:<0.1409.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.088,ns_1@10.242.238.90:<0.1414.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.088,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1405.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1411.1>, <<"cut off">>,<<"cut off">>,[],112,false,false,0, {1408,452753,86483}, completed, {<0.1409.1>,#Ref<0.0.1.139553>}, <<"replication_ns_1@10.242.238.90">>,<0.1405.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:33.088,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1409.1>,{#Ref<0.0.1.139542>,<0.1414.1>}} [error_logger:info,2014-08-19T16:52:33.088,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1414.1>}, {name, {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:33.094,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.101,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.101,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7702 us [ns_server:debug,2014-08-19T16:52:33.102,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.102,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{122, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.104,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 125 state to replica [ns_server:info,2014-08-19T16:52:33.104,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{}" ("}", []) [ns_server:debug,2014-08-19T16:52:33.106,ns_1@10.242.238.90:<0.1416.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{}", 'ns_1@10.242.238.88'}, #Ref<0.0.1.139708>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, 
{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{}"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.106,ns_1@10.242.238.90:<0.1416.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1414.1> [ns_server:debug,2014-08-19T16:52:33.112,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.112,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1418.1> [ns_server:info,2014-08-19T16:52:33.112,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:52:33.119,ns_1@10.242.238.90:<0.32312.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:33.119,ns_1@10.242.238.90:<0.32307.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:33.132,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {125,1}] [ns_server:info,2014-08-19T16:52:33.133,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.134,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.134,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.134,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.134,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.134,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.134,ns_1@10.242.238.90:<0.1419.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.135,ns_1@10.242.238.90:<0.1419.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.135,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.135,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.135,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.135,ns_1@10.242.238.90:<0.1414.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:52:33.135,ns_1@10.242.238.90:<0.1416.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1414.1> [ns_server:debug,2014-08-19T16:52:33.135,ns_1@10.242.238.90:<0.1416.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.136,ns_1@10.242.238.90:<0.1421.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.136,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1414.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1418.1>, <<"cut off">>,<<"cut off">>,[],115,false,false,0, {1408,452753,134223}, completed, {<0.1416.1>,#Ref<0.0.1.139721>}, <<"replication_ns_1@10.242.238.90">>,<0.1414.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:33.136,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1416.1>,{#Ref<0.0.1.139710>,<0.1421.1>}} [ns_server:debug,2014-08-19T16:52:33.136,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32312.0> (ok) [error_logger:info,2014-08-19T16:52:33.136,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1421.1>}, {name, {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{}", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{}"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:33.136,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32307.0> (ok) [rebalance:debug,2014-08-19T16:52:33.138,ns_1@10.242.238.90:<0.32310.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.138,ns_1@10.242.238.90:<0.32310.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.138,ns_1@10.242.238.90:<0.1422.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.138,ns_1@10.242.238.90:<0.1422.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.138,ns_1@10.242.238.90:<0.32310.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[rebalance:debug,2014-08-19T16:52:33.139,ns_1@10.242.238.90:<0.32305.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.139,ns_1@10.242.238.90:<0.32305.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.139,ns_1@10.242.238.90:<0.1423.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.139,ns_1@10.242.238.90:<0.1423.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.139,ns_1@10.242.238.90:<0.32305.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.141,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.145,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.145,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3422 us [ns_server:debug,2014-08-19T16:52:33.145,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.146,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{125, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.146,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 124 state to replica [ns_server:info,2014-08-19T16:52:33.147,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}" ("|", []) [ns_server:debug,2014-08-19T16:52:33.147,ns_1@10.242.238.90:<0.1425.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}", 'ns_1@10.242.238.88'}, #Ref<0.0.1.139896>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.148,ns_1@10.242.238.90:<0.1425.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1421.1> [ns_server:debug,2014-08-19T16:52:33.164,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{}"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] 
[rebalance:debug,2014-08-19T16:52:33.164,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1427.1> [ns_server:info,2014-08-19T16:52:33.165,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:33.185,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}] [ns_server:info,2014-08-19T16:52:33.186,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.186,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.186,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.186,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.186,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.187,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.187,ns_1@10.242.238.90:<0.1428.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.187,ns_1@10.242.238.90:<0.1428.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.187,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.187,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.188,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.188,ns_1@10.242.238.90:<0.1421.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.188,ns_1@10.242.238.90:<0.1425.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1421.1> [ns_server:debug,2014-08-19T16:52:33.188,ns_1@10.242.238.90:<0.1425.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.188,ns_1@10.242.238.90:<0.1430.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.188,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1421.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1427.1>, <<"cut off">>,<<"cut off">>,[],118,false,false,0, {1408,452753,186635}, completed, {<0.1425.1>,#Ref<0.0.1.139909>}, <<"replication_ns_1@10.242.238.90">>,<0.1421.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:33.189,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1425.1>,{#Ref<0.0.1.139898>,<0.1430.1>}} [error_logger:info,2014-08-19T16:52:33.189,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1430.1>}, {name, {new_child_id, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}", 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, "VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}"}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:33.193,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.196,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.196,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3099 us [ns_server:debug,2014-08-19T16:52:33.197,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.197,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{124, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.199,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 127 state to replica [ns_server:info,2014-08-19T16:52:33.199,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,127] ([127], []) [ns_server:debug,2014-08-19T16:52:33.200,ns_1@10.242.238.90:<0.1432.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 
123,124,125,127], 'ns_1@10.242.238.88'}, #Ref<0.0.1.140040>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,127]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.200,ns_1@10.242.238.90:<0.1432.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1430.1> [ns_server:debug,2014-08-19T16:52:33.211,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,"VXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}"}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.211,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1434.1> [ns_server:info,2014-08-19T16:52:33.212,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:33.232,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {127,1}] [ns_server:info,2014-08-19T16:52:33.233,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.234,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.234,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.234,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.234,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.234,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.234,ns_1@10.242.238.90:<0.1435.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.234,ns_1@10.242.238.90:<0.1435.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.234,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:33.234,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.235,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.235,ns_1@10.242.238.90:<0.1430.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.235,ns_1@10.242.238.90:<0.1432.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1430.1> [ns_server:debug,2014-08-19T16:52:33.235,ns_1@10.242.238.90:<0.1432.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.235,ns_1@10.242.238.90:<0.1437.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.235,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1430.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1434.1>, <<"cut off">>,<<"cut off">>,[],121,false,false,0, {1408,452753,234048}, completed, {<0.1432.1>,#Ref<0.0.1.140053>}, <<"replication_ns_1@10.242.238.90">>,<0.1430.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:33.236,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1432.1>,{#Ref<0.0.1.140042>,<0.1437.1>}} [error_logger:info,2014-08-19T16:52:33.236,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1437.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,127], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,127]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:33.241,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.244,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.245,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3256 us [ns_server:debug,2014-08-19T16:52:33.245,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.245,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{127, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, 
{sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.246,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 126 state to replica [ns_server:info,2014-08-19T16:52:33.246,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127] ("~", []) [ns_server:debug,2014-08-19T16:52:33.248,ns_1@10.242.238.90:<0.1439.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127], 'ns_1@10.242.238.88'}, #Ref<0.0.1.140190>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.249,ns_1@10.242.238.90:<0.1439.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1437.1> [ns_server:debug,2014-08-19T16:52:33.259,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,127]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.259,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1441.1> [ns_server:info,2014-08-19T16:52:33.259,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:52:33.261,ns_1@10.242.238.90:<0.32297.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:33.261,ns_1@10.242.238.90:<0.32302.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:33.280,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}] [ns_server:info,2014-08-19T16:52:33.280,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
[ns_server:info,2014-08-19T16:52:33.281,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.281,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.281,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.281,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.281,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.281,ns_1@10.242.238.90:<0.1442.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.281,ns_1@10.242.238.90:<0.1442.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.282,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.282,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.282,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.282,ns_1@10.242.238.90:<0.1437.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.282,ns_1@10.242.238.90:<0.1439.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1437.1> [ns_server:debug,2014-08-19T16:52:33.282,ns_1@10.242.238.90:<0.1439.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.282,ns_1@10.242.238.90:<0.1444.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.283,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1437.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1441.1>, <<"cut off">>,<<"cut off">>,[],124,false,false,0, {1408,452753,281311}, completed, {<0.1439.1>,#Ref<0.0.1.140203>}, <<"replication_ns_1@10.242.238.90">>,<0.1437.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:33.283,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1439.1>,{#Ref<0.0.1.140192>,<0.1444.1>}} [ns_server:debug,2014-08-19T16:52:33.283,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32297.0> (ok) [ns_server:debug,2014-08-19T16:52:33.283,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32302.0> (ok) [error_logger:info,2014-08-19T16:52:33.283,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1444.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:33.286,ns_1@10.242.238.90:<0.32281.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.286,ns_1@10.242.238.90:<0.32281.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.286,ns_1@10.242.238.90:<0.1445.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:52:33.286,ns_1@10.242.238.90:<0.32300.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.287,ns_1@10.242.238.90:<0.1445.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:33.287,ns_1@10.242.238.90:<0.32300.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.287,ns_1@10.242.238.90:<0.1446.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.287,ns_1@10.242.238.90:<0.1446.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.287,ns_1@10.242.238.90:<0.32281.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:52:33.287,ns_1@10.242.238.90:<0.32300.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:33.288,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.291,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.291,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3280 us [ns_server:debug,2014-08-19T16:52:33.292,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.292,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{126, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.296,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 129 state to replica [ns_server:info,2014-08-19T16:52:33.296,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 129] ([129], []) [ns_server:debug,2014-08-19T16:52:33.298,ns_1@10.242.238.90:<0.1448.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,129], 'ns_1@10.242.238.88'}, #Ref<0.0.1.140385>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,129]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.298,ns_1@10.242.238.90:<0.1448.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1444.1> [ns_server:debug,2014-08-19T16:52:33.305,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.306,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1450.1> [ns_server:info,2014-08-19T16:52:33.306,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` 
[ns_server:info,2014-08-19T16:52:33.326,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {129,1}] [ns_server:info,2014-08-19T16:52:33.327,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.327,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.327,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.327,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.328,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.328,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.328,ns_1@10.242.238.90:<0.1451.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.328,ns_1@10.242.238.90:<0.1451.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.328,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.328,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.328,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.328,ns_1@10.242.238.90:<0.1444.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.328,ns_1@10.242.238.90:<0.1448.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1444.1> [ns_server:debug,2014-08-19T16:52:33.329,ns_1@10.242.238.90:<0.1448.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.329,ns_1@10.242.238.90:<0.1453.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.329,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1444.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1450.1>, <<"cut off">>,<<"cut off">>,[],127,false,false,0, {1408,452753,327661}, completed, {<0.1448.1>,#Ref<0.0.1.140399>}, <<"replication_ns_1@10.242.238.90">>,<0.1444.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:33.329,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1448.1>,{#Ref<0.0.1.140387>,<0.1453.1>}} [error_logger:info,2014-08-19T16:52:33.329,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1453.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,129], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,129]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:33.334,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.337,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.337,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3456 us [ns_server:debug,2014-08-19T16:52:33.338,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{129, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.339,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 128 state to replica [ns_server:debug,2014-08-19T16:52:33.339,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:52:33.339,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129] ([128], []) [ns_server:debug,2014-08-19T16:52:33.340,ns_1@10.242.238.90:<0.1463.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129], 'ns_1@10.242.238.88'}, #Ref<0.0.1.140558>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, 
{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.341,ns_1@10.242.238.90:<0.1463.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1453.1> [ns_server:debug,2014-08-19T16:52:33.356,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,129]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.356,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1465.1> [ns_server:info,2014-08-19T16:52:33.356,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:33.377,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}] [ns_server:info,2014-08-19T16:52:33.378,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.378,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.378,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.378,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.379,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.379,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.379,ns_1@10.242.238.90:<0.1472.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.379,ns_1@10.242.238.90:<0.1472.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.379,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:33.379,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.379,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.379,ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.379,ns_1@10.242.238.90:<0.1463.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1453.1> [ns_server:debug,2014-08-19T16:52:33.380,ns_1@10.242.238.90:<0.1463.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.380,ns_1@10.242.238.90:<0.1474.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.380,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1453.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1465.1>, <<"cut off">>,<<"cut off">>,[],130,false,false,0, {1408,452753,378642}, completed, {<0.1463.1>,#Ref<0.0.1.140571>}, <<"replication_ns_1@10.242.238.90">>,<0.1453.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:33.380,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1463.1>,{#Ref<0.0.1.140560>,<0.1474.1>}} [error_logger:info,2014-08-19T16:52:33.380,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1474.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128, 129]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:33.385,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.393,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.393,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7881 us [ns_server:debug,2014-08-19T16:52:33.393,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.394,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{128, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, 
{uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.395,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 131 state to replica [ns_server:info,2014-08-19T16:52:33.395,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,131] ([131], []) [ns_server:debug,2014-08-19T16:52:33.396,ns_1@10.242.238.90:<0.1476.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,131], 'ns_1@10.242.238.88'}, #Ref<0.0.1.140715>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,131]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.397,ns_1@10.242.238.90:<0.1476.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1474.1> [ns_server:debug,2014-08-19T16:52:33.402,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.402,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1478.1> [ns_server:info,2014-08-19T16:52:33.402,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:52:33.403,ns_1@10.242.238.90:<0.32278.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:33.403,ns_1@10.242.238.90:<0.32273.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:33.422,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {131,1}] [ns_server:info,2014-08-19T16:52:33.423,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
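The bucket config repeated in these entries gives ram_quota in bytes; 13369344000 bytes works out to exactly 12,750 MiB (about 12.45 GiB). A quick arithmetic check, assuming nothing beyond the number shown in the config above:

    # Quick check of the ram_quota value shown in the bucket config above.
    ram_quota_bytes = 13369344000
    mib = ram_quota_bytes / 1024 ** 2   # 12750.0 MiB exactly
    gib = ram_quota_bytes / 1024 ** 3   # ~12.45 GiB
    print(mib, round(gib, 2))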
[ns_server:info,2014-08-19T16:52:33.424,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.424,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.424,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.424,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.424,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.424,ns_1@10.242.238.90:<0.1479.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.424,ns_1@10.242.238.90:<0.1479.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.425,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.425,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.425,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.425,ns_1@10.242.238.90:<0.1474.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.425,ns_1@10.242.238.90:<0.1476.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1474.1> [ns_server:debug,2014-08-19T16:52:33.425,ns_1@10.242.238.90:<0.1476.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.426,ns_1@10.242.238.90:<0.1481.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.426,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1474.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1478.1>, <<"cut off">>,<<"cut off">>,[],133,false,false,0, {1408,452753,424318}, completed, {<0.1476.1>,#Ref<0.0.1.140728>}, <<"replication_ns_1@10.242.238.90">>,<0.1474.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:33.426,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1476.1>,{#Ref<0.0.1.140717>,<0.1481.1>}} [ns_server:debug,2014-08-19T16:52:33.426,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32278.0> (ok) [ns_server:debug,2014-08-19T16:52:33.426,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32273.0> (ok) [error_logger:info,2014-08-19T16:52:33.426,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1481.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,131], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 131]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:33.428,ns_1@10.242.238.90:<0.32276.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.428,ns_1@10.242.238.90:<0.32276.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.428,ns_1@10.242.238.90:<0.1482.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.428,ns_1@10.242.238.90:<0.1482.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:33.428,ns_1@10.242.238.90:<0.32271.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:52:33.428,ns_1@10.242.238.90:<0.32276.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.429,ns_1@10.242.238.90:<0.32271.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.429,ns_1@10.242.238.90:<0.1483.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.429,ns_1@10.242.238.90:<0.1483.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.429,ns_1@10.242.238.90:<0.32271.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
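Every entry in this log carries the same bracketed header: component:severity, timestamp, then node:process:module:function:line. A small illustrative parser for that header, with the pattern inferred from the entries above rather than taken from ns_server itself:

    # Illustrative parser for the entry headers seen in this log. The pattern is
    # inferred from lines such as "[rebalance:info,2014-08-19T16:52:33.379,
    # ns_1@10.242.238.90:<0.1453.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]...".
    import re

    HEADER = re.compile(
        r"\[(?P<component>[a-z_]+):(?P<severity>[a-z]+),"
        r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+),"
        r"(?P<node>[^:]+):(?P<process>[^:]*):"
        r"(?P<module>[^:]+):(?P<function>[^:]+):(?P<line>\d+)\]"
    )

    def parse_header(entry):
        """Return the header fields of one log entry, or None if it doesn't match."""
        m = HEADER.match(entry)
        return m.groupdict() if m else None

    sample = ("[rebalance:info,2014-08-19T16:52:33.379,ns_1@10.242.238.90:<0.1453.1>"
              ":ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack!")
    print(parse_header(sample)["function"])   # do_confirm_sent_messages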
[ns_server:debug,2014-08-19T16:52:33.434,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.437,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.437,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3293 us [ns_server:debug,2014-08-19T16:52:33.438,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.438,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{131, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.440,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 130 state to replica [ns_server:info,2014-08-19T16:52:33.440,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131] ([130], []) [ns_server:debug,2014-08-19T16:52:33.441,ns_1@10.242.238.90:<0.1488.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131], 'ns_1@10.242.238.88'}, #Ref<0.0.1.140950>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.442,ns_1@10.242.238.90:<0.1488.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1481.1> [ns_server:debug,2014-08-19T16:52:33.450,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,131]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.450,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1490.1> [ns_server:info,2014-08-19T16:52:33.451,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` 
[ns_server:info,2014-08-19T16:52:33.471,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}] [ns_server:info,2014-08-19T16:52:33.472,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.472,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.472,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.472,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.473,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.473,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.473,ns_1@10.242.238.90:<0.1491.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.473,ns_1@10.242.238.90:<0.1491.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.473,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.473,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.473,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.473,ns_1@10.242.238.90:<0.1481.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.474,ns_1@10.242.238.90:<0.1488.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1481.1> [ns_server:debug,2014-08-19T16:52:33.474,ns_1@10.242.238.90:<0.1488.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.474,ns_1@10.242.238.90:<0.1493.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.474,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1481.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1490.1>, <<"cut off">>,<<"cut off">>,[],136,false,false,0, {1408,452753,472667}, completed, {<0.1488.1>,#Ref<0.0.1.140963>}, <<"replication_ns_1@10.242.238.90">>,<0.1481.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:33.475,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1488.1>,{#Ref<0.0.1.140952>,<0.1493.1>}} [error_logger:info,2014-08-19T16:52:33.475,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1493.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:33.480,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.483,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.484,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3592 us [ns_server:debug,2014-08-19T16:52:33.484,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.484,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{130, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.485,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 132 state to replica [ns_server:info,2014-08-19T16:52:33.486,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132] ([132], []) [ns_server:debug,2014-08-19T16:52:33.487,ns_1@10.242.238.90:<0.1495.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132], 'ns_1@10.242.238.88'}, #Ref<0.0.1.141104>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, 
{username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.487,ns_1@10.242.238.90:<0.1495.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1493.1> [ns_server:debug,2014-08-19T16:52:33.498,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.499,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1497.1> [ns_server:info,2014-08-19T16:52:33.499,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:33.519,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}] [ns_server:info,2014-08-19T16:52:33.520,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.520,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.520,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.520,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.520,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.520,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.520,ns_1@10.242.238.90:<0.1498.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.521,ns_1@10.242.238.90:<0.1498.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.521,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:33.521,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.521,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.521,ns_1@10.242.238.90:<0.1493.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.521,ns_1@10.242.238.90:<0.1495.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1493.1> [ns_server:debug,2014-08-19T16:52:33.521,ns_1@10.242.238.90:<0.1495.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.521,ns_1@10.242.238.90:<0.1500.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.522,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1493.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1497.1>, <<"cut off">>,<<"cut off">>,[],139,false,false,0, {1408,452753,520354}, completed, {<0.1495.1>,#Ref<0.0.1.141117>}, <<"replication_ns_1@10.242.238.90">>,<0.1493.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:33.522,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1495.1>,{#Ref<0.0.1.141106>,<0.1500.1>}} [error_logger:info,2014-08-19T16:52:33.522,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1500.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:33.526,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.529,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.530,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3334 us [ns_server:debug,2014-08-19T16:52:33.530,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.530,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{132, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, 
{uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.531,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 133 state to replica [ns_server:info,2014-08-19T16:52:33.532,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133] ([133], []) [ns_server:debug,2014-08-19T16:52:33.533,ns_1@10.242.238.90:<0.1502.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133], 'ns_1@10.242.238.88'}, #Ref<0.0.1.141240>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.533,ns_1@10.242.238.90:<0.1502.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1500.1> [rebalance:debug,2014-08-19T16:52:33.539,ns_1@10.242.238.90:<0.32268.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:33.539,ns_1@10.242.238.90:<0.32263.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:33.548,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.548,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1504.1> [ns_server:info,2014-08-19T16:52:33.548,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:33.569,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}] [ns_server:info,2014-08-19T16:52:33.570,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter 
on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.570,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.570,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.570,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.570,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.571,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.571,ns_1@10.242.238.90:<0.1505.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.571,ns_1@10.242.238.90:<0.1505.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.571,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.571,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.571,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.571,ns_1@10.242.238.90:<0.1500.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.571,ns_1@10.242.238.90:<0.1502.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1500.1> [ns_server:debug,2014-08-19T16:52:33.572,ns_1@10.242.238.90:<0.1502.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.572,ns_1@10.242.238.90:<0.1507.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.572,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1500.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1504.1>, <<"cut off">>,<<"cut off">>,[],142,false,false,0, {1408,452753,570556}, completed, {<0.1502.1>,#Ref<0.0.1.141253>}, <<"replication_ns_1@10.242.238.90">>,<0.1500.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:33.572,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1502.1>,{#Ref<0.0.1.141242>,<0.1507.1>}} [ns_server:debug,2014-08-19T16:52:33.572,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32268.0> (ok) [ns_server:debug,2014-08-19T16:52:33.572,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32263.0> (ok) [error_logger:info,2014-08-19T16:52:33.572,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1507.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:33.574,ns_1@10.242.238.90:<0.32266.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.574,ns_1@10.242.238.90:<0.32266.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.574,ns_1@10.242.238.90:<0.1508.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.574,ns_1@10.242.238.90:<0.1508.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.574,ns_1@10.242.238.90:<0.32266.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:33.576,ns_1@10.242.238.90:<0.32261.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.576,ns_1@10.242.238.90:<0.32261.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.576,ns_1@10.242.238.90:<0.1509.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.576,ns_1@10.242.238.90:<0.1509.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.577,ns_1@10.242.238.90:<0.32261.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:33.579,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.582,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.582,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3396 us [ns_server:debug,2014-08-19T16:52:33.583,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.583,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{133, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.584,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 135 state to replica [ns_server:info,2014-08-19T16:52:33.585,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,135] ([135], []) [ns_server:debug,2014-08-19T16:52:33.586,ns_1@10.242.238.90:<0.1511.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 135], 'ns_1@10.242.238.88'}, #Ref<0.0.1.141444>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133, 135]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.586,ns_1@10.242.238.90:<0.1511.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1507.1> [ns_server:debug,2014-08-19T16:52:33.596,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.596,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1513.1> [ns_server:info,2014-08-19T16:52:33.597,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream 
`replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:33.617,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {135,1}] [ns_server:info,2014-08-19T16:52:33.618,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.618,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.618,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.618,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.619,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.619,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.619,ns_1@10.242.238.90:<0.1514.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.619,ns_1@10.242.238.90:<0.1514.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.619,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.619,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.619,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.619,ns_1@10.242.238.90:<0.1507.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.619,ns_1@10.242.238.90:<0.1511.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1507.1> [ns_server:debug,2014-08-19T16:52:33.620,ns_1@10.242.238.90:<0.1511.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.620,ns_1@10.242.238.90:<0.1516.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.620,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1507.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1513.1>, <<"cut off">>,<<"cut off">>,[],145,false,false,0, {1408,452753,618660}, completed, {<0.1511.1>,#Ref<0.0.1.141457>}, <<"replication_ns_1@10.242.238.90">>,<0.1507.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:33.620,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1511.1>,{#Ref<0.0.1.141446>,<0.1516.1>}} [error_logger:info,2014-08-19T16:52:33.620,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1516.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 135], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,135]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:33.626,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.627,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.628,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1522 us [ns_server:debug,2014-08-19T16:52:33.628,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.628,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{135, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.630,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 134 state to replica [ns_server:info,2014-08-19T16:52:33.630,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135] 
([134], []) [ns_server:debug,2014-08-19T16:52:33.631,ns_1@10.242.238.90:<0.1518.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135], 'ns_1@10.242.238.88'}, #Ref<0.0.1.141591>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.631,ns_1@10.242.238.90:<0.1518.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1516.1> [ns_server:debug,2014-08-19T16:52:33.643,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,135]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.643,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1520.1> [ns_server:info,2014-08-19T16:52:33.644,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:33.664,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}] [ns_server:info,2014-08-19T16:52:33.665,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.665,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.665,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:52:33.665,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.665,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.666,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.666,ns_1@10.242.238.90:<0.1521.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.666,ns_1@10.242.238.90:<0.1521.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.666,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.666,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.666,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.666,ns_1@10.242.238.90:<0.1516.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.666,ns_1@10.242.238.90:<0.1518.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1516.1> [ns_server:debug,2014-08-19T16:52:33.667,ns_1@10.242.238.90:<0.1518.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.667,ns_1@10.242.238.90:<0.1523.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.667,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1516.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1520.1>, <<"cut off">>,<<"cut off">>,[],148,false,false,0, {1408,452753,665586}, completed, {<0.1518.1>,#Ref<0.0.1.141604>}, <<"replication_ns_1@10.242.238.90">>,<0.1516.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:33.667,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1518.1>,{#Ref<0.0.1.141593>,<0.1523.1>}} [error_logger:info,2014-08-19T16:52:33.667,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1523.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:33.671,ns_1@10.242.238.90:<0.32258.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:33.671,ns_1@10.242.238.90:<0.32253.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:33.671,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32258.0> (ok) [ns_server:debug,2014-08-19T16:52:33.672,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32253.0> (ok) [ns_server:debug,2014-08-19T16:52:33.672,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:52:33.673,ns_1@10.242.238.90:<0.32256.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.673,ns_1@10.242.238.90:<0.32256.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.673,ns_1@10.242.238.90:<0.1524.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.673,ns_1@10.242.238.90:<0.1524.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:33.674,ns_1@10.242.238.90:<0.32251.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.674,ns_1@10.242.238.90:<0.32251.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.674,ns_1@10.242.238.90:<0.1525.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.674,ns_1@10.242.238.90:<0.1525.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.674,ns_1@10.242.238.90:<0.32256.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:52:33.674,ns_1@10.242.238.90:<0.32251.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:33.676,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.677,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5019 us [ns_server:debug,2014-08-19T16:52:33.677,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.677,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{134, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.680,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 136 state to replica [ns_server:info,2014-08-19T16:52:33.680,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136] ([136], []) [ns_server:debug,2014-08-19T16:52:33.681,ns_1@10.242.238.90:<0.1527.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136], 'ns_1@10.242.238.88'}, #Ref<0.0.1.141787>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.682,ns_1@10.242.238.90:<0.1527.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1523.1> [ns_server:debug,2014-08-19T16:52:33.693,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.693,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1529.1> [ns_server:info,2014-08-19T16:52:33.693,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:33.714,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket 
filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}] [ns_server:info,2014-08-19T16:52:33.715,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.715,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.715,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.715,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.715,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.715,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.715,ns_1@10.242.238.90:<0.1530.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.716,ns_1@10.242.238.90:<0.1530.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.716,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.716,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.716,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.716,ns_1@10.242.238.90:<0.1523.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.716,ns_1@10.242.238.90:<0.1527.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1523.1> [ns_server:debug,2014-08-19T16:52:33.716,ns_1@10.242.238.90:<0.1527.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.716,ns_1@10.242.238.90:<0.1532.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.717,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1523.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1529.1>, <<"cut off">>,<<"cut off">>,[],151,false,false,0, {1408,452753,715466}, completed, {<0.1527.1>,#Ref<0.0.1.141800>}, <<"replication_ns_1@10.242.238.90">>,<0.1523.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:33.717,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1527.1>,{#Ref<0.0.1.141789>,<0.1532.1>}} [error_logger:info,2014-08-19T16:52:33.717,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1532.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:33.723,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.726,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.726,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3359 us [ns_server:debug,2014-08-19T16:52:33.727,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.727,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{136, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.729,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 137 state to replica [ns_server:info,2014-08-19T16:52:33.729,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137] ([137], []) [ns_server:debug,2014-08-19T16:52:33.732,ns_1@10.242.238.90:<0.1534.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137], 'ns_1@10.242.238.88'}, #Ref<0.0.1.141936>} Args:[{"10.242.238.88",11209}, 
{"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.732,ns_1@10.242.238.90:<0.1534.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1532.1> [ns_server:debug,2014-08-19T16:52:33.743,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.743,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1536.1> [ns_server:info,2014-08-19T16:52:33.743,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:33.763,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}] [ns_server:info,2014-08-19T16:52:33.764,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.765,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.765,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.765,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.765,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.765,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.765,ns_1@10.242.238.90:<0.1537.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.765,ns_1@10.242.238.90:<0.1537.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.765,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:33.765,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.766,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.766,ns_1@10.242.238.90:<0.1532.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.766,ns_1@10.242.238.90:<0.1534.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1532.1> [ns_server:debug,2014-08-19T16:52:33.766,ns_1@10.242.238.90:<0.1534.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.766,ns_1@10.242.238.90:<0.1539.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.767,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1532.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1536.1>, <<"cut off">>,<<"cut off">>,[],154,false,false,0, {1408,452753,765155}, completed, {<0.1534.1>,#Ref<0.0.1.141951>}, <<"replication_ns_1@10.242.238.90">>,<0.1532.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:33.767,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1534.1>,{#Ref<0.0.1.141939>,<0.1539.1>}} [error_logger:info,2014-08-19T16:52:33.767,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1539.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:33.772,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.775,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.775,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3081 us [ns_server:debug,2014-08-19T16:52:33.775,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.776,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{137, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.777,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 138 state to replica [ns_server:info,2014-08-19T16:52:33.777,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138] ([138], []) [ns_server:debug,2014-08-19T16:52:33.778,ns_1@10.242.238.90:<0.1541.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138], 'ns_1@10.242.238.88'}, #Ref<0.0.1.142084>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.778,ns_1@10.242.238.90:<0.1541.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1539.1> [ns_server:debug,2014-08-19T16:52:33.789,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.789,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1543.1> [ns_server:info,2014-08-19T16:52:33.789,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:52:33.790,ns_1@10.242.238.90:<0.32248.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:33.790,ns_1@10.242.238.90:<0.32229.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:33.810,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, 
{133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}] [ns_server:info,2014-08-19T16:52:33.811,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.811,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.811,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.811,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.811,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.811,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.811,ns_1@10.242.238.90:<0.1544.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.812,ns_1@10.242.238.90:<0.1544.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.812,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.812,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.812,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.812,ns_1@10.242.238.90:<0.1539.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.812,ns_1@10.242.238.90:<0.1541.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1539.1> [ns_server:debug,2014-08-19T16:52:33.812,ns_1@10.242.238.90:<0.1541.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.813,ns_1@10.242.238.90:<0.1546.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.813,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1539.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1543.1>, <<"cut off">>,<<"cut off">>,[],157,false,false,0, {1408,452753,811560}, completed, {<0.1541.1>,#Ref<0.0.1.142097>}, <<"replication_ns_1@10.242.238.90">>,<0.1539.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:33.813,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1541.1>,{#Ref<0.0.1.142086>,<0.1546.1>}} [ns_server:debug,2014-08-19T16:52:33.813,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32248.0> (ok) [ns_server:debug,2014-08-19T16:52:33.813,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32229.0> (ok) [error_logger:info,2014-08-19T16:52:33.813,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1546.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:33.815,ns_1@10.242.238.90:<0.32232.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.815,ns_1@10.242.238.90:<0.32232.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.815,ns_1@10.242.238.90:<0.1547.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.815,ns_1@10.242.238.90:<0.1547.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:33.815,ns_1@10.242.238.90:<0.32227.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.815,ns_1@10.242.238.90:<0.32227.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.815,ns_1@10.242.238.90:<0.1548.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:52:33.815,ns_1@10.242.238.90:<0.32232.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.815,ns_1@10.242.238.90:<0.1548.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.816,ns_1@10.242.238.90:<0.32227.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:33.821,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.822,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.823,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1762 us [ns_server:debug,2014-08-19T16:52:33.823,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.823,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{138, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.824,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 139 state to replica [ns_server:info,2014-08-19T16:52:33.825,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139] ([139], []) [ns_server:debug,2014-08-19T16:52:33.826,ns_1@10.242.238.90:<0.1550.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139], 'ns_1@10.242.238.88'}, #Ref<0.0.1.142272>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.826,ns_1@10.242.238.90:<0.1550.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1546.1> [ns_server:debug,2014-08-19T16:52:33.836,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.836,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1552.1> 
[ns_server:info,2014-08-19T16:52:33.837,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:33.858,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}] [ns_server:info,2014-08-19T16:52:33.859,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.860,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.860,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.860,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.860,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.861,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.861,ns_1@10.242.238.90:<0.1553.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.861,ns_1@10.242.238.90:<0.1553.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.861,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.861,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.861,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.861,ns_1@10.242.238.90:<0.1546.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.861,ns_1@10.242.238.90:<0.1550.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1546.1> [ns_server:debug,2014-08-19T16:52:33.862,ns_1@10.242.238.90:<0.1550.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.862,ns_1@10.242.238.90:<0.1555.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.862,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1546.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1552.1>, <<"cut off">>,<<"cut off">>,[],160,false,false,0, {1408,452753,860471}, completed, {<0.1550.1>,#Ref<0.0.1.142288>}, <<"replication_ns_1@10.242.238.90">>,<0.1546.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:33.862,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1550.1>,{#Ref<0.0.1.142274>,<0.1555.1>}} [error_logger:info,2014-08-19T16:52:33.862,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1555.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138, 139]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:33.867,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.870,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.871,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3299 us [ns_server:debug,2014-08-19T16:52:33.871,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.871,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{139, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.874,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 140 state to replica [ns_server:info,2014-08-19T16:52:33.874,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 
109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140] ([140], []) [ns_server:debug,2014-08-19T16:52:33.875,ns_1@10.242.238.90:<0.1557.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140], 'ns_1@10.242.238.88'}, #Ref<0.0.1.142426>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.875,ns_1@10.242.238.90:<0.1557.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1555.1> [ns_server:debug,2014-08-19T16:52:33.885,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.885,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1559.1> [ns_server:info,2014-08-19T16:52:33.886,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:52:33.899,ns_1@10.242.238.90:<0.32219.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:33.899,ns_1@10.242.238.90:<0.32224.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:33.906,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}] [ns_server:info,2014-08-19T16:52:33.907,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.907,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.907,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:52:33.907,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.907,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.907,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.907,ns_1@10.242.238.90:<0.1560.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.908,ns_1@10.242.238.90:<0.1560.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.908,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.908,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.908,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.908,ns_1@10.242.238.90:<0.1555.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.908,ns_1@10.242.238.90:<0.1557.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1555.1> [ns_server:debug,2014-08-19T16:52:33.908,ns_1@10.242.238.90:<0.1557.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.909,ns_1@10.242.238.90:<0.1562.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.909,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1555.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1559.1>, <<"cut off">>,<<"cut off">>,[],163,false,false,0, {1408,452753,907447}, completed, {<0.1557.1>,#Ref<0.0.1.142439>}, <<"replication_ns_1@10.242.238.90">>,<0.1555.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:33.909,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1557.1>,{#Ref<0.0.1.142428>,<0.1562.1>}} [ns_server:debug,2014-08-19T16:52:33.909,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32219.0> (ok) [ns_server:debug,2014-08-19T16:52:33.909,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32224.0> (ok) [error_logger:info,2014-08-19T16:52:33.909,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1562.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:33.911,ns_1@10.242.238.90:<0.32217.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.912,ns_1@10.242.238.90:<0.32217.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.912,ns_1@10.242.238.90:<0.1563.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.912,ns_1@10.242.238.90:<0.1563.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.912,ns_1@10.242.238.90:<0.32217.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:33.913,ns_1@10.242.238.90:<0.32222.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:33.913,ns_1@10.242.238.90:<0.32222.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.913,ns_1@10.242.238.90:<0.1564.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.913,ns_1@10.242.238.90:<0.1564.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.913,ns_1@10.242.238.90:<0.32222.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:33.916,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.917,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.917,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1406 us [ns_server:debug,2014-08-19T16:52:33.917,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.918,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{140, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.919,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 141 state to replica [ns_server:info,2014-08-19T16:52:33.919,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141] ([141], []) [ns_server:debug,2014-08-19T16:52:33.920,ns_1@10.242.238.90:<0.1566.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141], 'ns_1@10.242.238.88'}, #Ref<0.0.1.142614>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.920,ns_1@10.242.238.90:<0.1566.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1562.1> [ns_server:debug,2014-08-19T16:52:33.935,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.935,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1568.1> 
[ns_server:info,2014-08-19T16:52:33.935,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:33.956,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}] [ns_server:info,2014-08-19T16:52:33.957,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:33.957,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:33.958,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:33.958,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:33.958,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:33.958,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:33.958,ns_1@10.242.238.90:<0.1569.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:33.958,ns_1@10.242.238.90:<0.1569.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:33.958,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:33.958,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:33.959,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:33.959,ns_1@10.242.238.90:<0.1562.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:33.959,ns_1@10.242.238.90:<0.1566.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1562.1> [ns_server:debug,2014-08-19T16:52:33.959,ns_1@10.242.238.90:<0.1566.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:33.959,ns_1@10.242.238.90:<0.1571.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:33.960,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1562.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1568.1>, <<"cut off">>,<<"cut off">>,[],166,false,false,0, {1408,452753,957828}, completed, {<0.1566.1>,#Ref<0.0.1.142627>}, <<"replication_ns_1@10.242.238.90">>,<0.1562.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:33.960,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1566.1>,{#Ref<0.0.1.142616>,<0.1571.1>}} [error_logger:info,2014-08-19T16:52:33.960,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1571.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:33.965,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:33.972,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.973,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7492 us [ns_server:debug,2014-08-19T16:52:33.973,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:33.973,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{141, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:33.974,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 143 state to replica [ns_server:info,2014-08-19T16:52:33.974,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 
109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,143] ([143], []) [ns_server:debug,2014-08-19T16:52:33.976,ns_1@10.242.238.90:<0.1573.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,143], 'ns_1@10.242.238.88'}, #Ref<0.0.1.142768>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,143]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:33.976,ns_1@10.242.238.90:<0.1573.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1571.1> [ns_server:debug,2014-08-19T16:52:33.986,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:33.987,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1575.1> [ns_server:info,2014-08-19T16:52:33.987,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:52:33.999,ns_1@10.242.238.90:<0.32214.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:33.999,ns_1@10.242.238.90:<0.32209.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:34.007,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {143,1}] [ns_server:info,2014-08-19T16:52:34.008,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:34.009,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:34.009,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:52:34.009,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:34.009,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:34.009,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.009,ns_1@10.242.238.90:<0.1576.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.009,ns_1@10.242.238.90:<0.1576.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.009,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.010,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:34.010,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:34.010,ns_1@10.242.238.90:<0.1571.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:34.010,ns_1@10.242.238.90:<0.1573.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1571.1> [ns_server:debug,2014-08-19T16:52:34.010,ns_1@10.242.238.90:<0.1573.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:34.010,ns_1@10.242.238.90:<0.1578.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:34.010,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1571.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1575.1>, <<"cut off">>,<<"cut off">>,[],169,false,false,0, {1408,452754,9115}, completed, {<0.1573.1>,#Ref<0.0.1.142781>}, <<"replication_ns_1@10.242.238.90">>,<0.1571.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:34.011,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1573.1>,{#Ref<0.0.1.142770>,<0.1578.1>}} [error_logger:info,2014-08-19T16:52:34.011,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1578.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,143], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,143]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:34.011,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32214.0> (ok) [ns_server:debug,2014-08-19T16:52:34.011,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32209.0> (ok) [rebalance:debug,2014-08-19T16:52:34.012,ns_1@10.242.238.90:<0.32212.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.013,ns_1@10.242.238.90:<0.32212.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.013,ns_1@10.242.238.90:<0.1579.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:debug,2014-08-19T16:52:34.013,ns_1@10.242.238.90:<0.32207.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.013,ns_1@10.242.238.90:<0.1579.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:34.013,ns_1@10.242.238.90:<0.32207.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.013,ns_1@10.242.238.90:<0.1580.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [rebalance:info,2014-08-19T16:52:34.013,ns_1@10.242.238.90:<0.32212.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.013,ns_1@10.242.238.90:<0.1580.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.013,ns_1@10.242.238.90:<0.32207.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:34.018,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.021,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.022,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3452 us [ns_server:debug,2014-08-19T16:52:34.022,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.023,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{143, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:34.024,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 142 state to replica [ns_server:info,2014-08-19T16:52:34.024,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143] ([142], []) [ns_server:debug,2014-08-19T16:52:34.026,ns_1@10.242.238.90:<0.1582.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143], 'ns_1@10.242.238.88'}, #Ref<0.0.1.142959>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:34.027,ns_1@10.242.238.90:<0.1582.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1578.1> [ns_server:debug,2014-08-19T16:52:34.037,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,143]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:34.037,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1584.1> 
[ns_server:info,2014-08-19T16:52:34.037,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:34.058,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}] [ns_server:info,2014-08-19T16:52:34.059,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:34.059,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:34.059,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:34.059,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:34.059,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:34.060,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.060,ns_1@10.242.238.90:<0.1585.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.060,ns_1@10.242.238.90:<0.1585.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.060,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.060,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:34.060,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:34.060,ns_1@10.242.238.90:<0.1578.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:34.060,ns_1@10.242.238.90:<0.1582.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1578.1> [ns_server:debug,2014-08-19T16:52:34.061,ns_1@10.242.238.90:<0.1582.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:34.061,ns_1@10.242.238.90:<0.1587.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:34.061,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1578.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1584.1>, <<"cut off">>,<<"cut off">>,[],172,false,false,0, {1408,452754,59533}, completed, {<0.1582.1>,#Ref<0.0.1.142973>}, <<"replication_ns_1@10.242.238.90">>,<0.1578.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:34.061,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1582.1>,{#Ref<0.0.1.142961>,<0.1587.1>}} [error_logger:info,2014-08-19T16:52:34.061,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1587.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:34.066,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.069,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.069,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3134 us [ns_server:debug,2014-08-19T16:52:34.069,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.070,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{142, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:34.071,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 144 state to replica [ns_server:info,2014-08-19T16:52:34.071,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 
109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144] ([144], []) [ns_server:debug,2014-08-19T16:52:34.072,ns_1@10.242.238.90:<0.1589.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144], 'ns_1@10.242.238.88'}, #Ref<0.0.1.143110>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:34.072,ns_1@10.242.238.90:<0.1589.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1587.1> [ns_server:debug,2014-08-19T16:52:34.084,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:34.085,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1591.1> [ns_server:info,2014-08-19T16:52:34.085,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:34.107,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}] [rebalance:debug,2014-08-19T16:52:34.108,ns_1@10.242.238.90:<0.32185.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:34.108,ns_1@10.242.238.90:<0.32204.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:34.108,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:34.109,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:34.109,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:52:34.109,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:34.109,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:34.109,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.109,ns_1@10.242.238.90:<0.1592.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.109,ns_1@10.242.238.90:<0.1592.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.109,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.110,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:34.110,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:34.110,ns_1@10.242.238.90:<0.1587.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:34.110,ns_1@10.242.238.90:<0.1589.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1587.1> [ns_server:debug,2014-08-19T16:52:34.110,ns_1@10.242.238.90:<0.1589.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:34.110,ns_1@10.242.238.90:<0.1594.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:34.110,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1587.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1591.1>, <<"cut off">>,<<"cut off">>,[],175,false,false,0, {1408,452754,109167}, completed, {<0.1589.1>,#Ref<0.0.1.143123>}, <<"replication_ns_1@10.242.238.90">>,<0.1587.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:34.111,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1589.1>,{#Ref<0.0.1.143112>,<0.1594.1>}} [ns_server:debug,2014-08-19T16:52:34.111,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32185.0> (ok) [ns_server:debug,2014-08-19T16:52:34.111,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32204.0> (ok) [error_logger:info,2014-08-19T16:52:34.111,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1594.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:34.112,ns_1@10.242.238.90:<0.32183.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.113,ns_1@10.242.238.90:<0.32183.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.113,ns_1@10.242.238.90:<0.1595.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.113,ns_1@10.242.238.90:<0.1595.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:debug,2014-08-19T16:52:34.113,ns_1@10.242.238.90:<0.32188.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:info,2014-08-19T16:52:34.113,ns_1@10.242.238.90:<0.32183.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.113,ns_1@10.242.238.90:<0.32188.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.113,ns_1@10.242.238.90:<0.1596.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.113,ns_1@10.242.238.90:<0.1596.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.113,ns_1@10.242.238.90:<0.32188.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:34.118,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.119,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.120,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1744 us [ns_server:debug,2014-08-19T16:52:34.120,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.121,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{144, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:34.125,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 145 state to replica [ns_server:info,2014-08-19T16:52:34.126,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145] ([145], []) [ns_server:debug,2014-08-19T16:52:34.127,ns_1@10.242.238.90:<0.1598.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145], 'ns_1@10.242.238.88'}, #Ref<0.0.1.143322>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:34.127,ns_1@10.242.238.90:<0.1598.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1594.1> [ns_server:debug,2014-08-19T16:52:34.134,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:34.135,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:init:640]upstream_sender pid: 
<0.1600.1> [ns_server:info,2014-08-19T16:52:34.135,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:34.155,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}] [ns_server:info,2014-08-19T16:52:34.156,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:34.157,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:34.157,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:34.157,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:34.157,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:34.157,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.157,ns_1@10.242.238.90:<0.1601.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.157,ns_1@10.242.238.90:<0.1601.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.157,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.158,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:34.158,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:34.158,ns_1@10.242.238.90:<0.1594.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:34.158,ns_1@10.242.238.90:<0.1598.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1594.1> [ns_server:debug,2014-08-19T16:52:34.158,ns_1@10.242.238.90:<0.1598.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:34.158,ns_1@10.242.238.90:<0.1603.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:34.158,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1594.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1600.1>, <<"cut off">>,<<"cut off">>,[],178,false,false,0, {1408,452754,157012}, completed, {<0.1598.1>,#Ref<0.0.1.143335>}, <<"replication_ns_1@10.242.238.90">>,<0.1594.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:34.159,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1598.1>,{#Ref<0.0.1.143324>,<0.1603.1>}} [error_logger:info,2014-08-19T16:52:34.159,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1603.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:34.163,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.167,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.167,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1797 us [ns_server:debug,2014-08-19T16:52:34.167,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.168,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{145, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:34.181,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] 
[rebalance:debug,2014-08-19T16:52:34.181,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1605.1> [rebalance:debug,2014-08-19T16:52:34.191,ns_1@10.242.238.90:<0.32180.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:34.191,ns_1@10.242.238.90:<0.32174.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:34.191,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32180.0> (ok) [ns_server:debug,2014-08-19T16:52:34.192,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32174.0> (ok) [rebalance:debug,2014-08-19T16:52:34.193,ns_1@10.242.238.90:<0.32177.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.193,ns_1@10.242.238.90:<0.32177.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.193,ns_1@10.242.238.90:<0.1606.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.193,ns_1@10.242.238.90:<0.1606.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.193,ns_1@10.242.238.90:<0.32177.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:34.193,ns_1@10.242.238.90:<0.32172.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.194,ns_1@10.242.238.90:<0.32172.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.194,ns_1@10.242.238.90:<0.1607.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.194,ns_1@10.242.238.90:<0.1607.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.194,ns_1@10.242.238.90:<0.32172.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:34.204,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.207,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.207,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3567 us [ns_server:debug,2014-08-19T16:52:34.208,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.208,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{3, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:34.249,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.253,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.253,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3924 us [ns_server:debug,2014-08-19T16:52:34.253,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.254,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{2, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:34.257,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 146 state to replica [ns_server:info,2014-08-19T16:52:34.257,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146] ([146], []) [ns_server:debug,2014-08-19T16:52:34.259,ns_1@10.242.238.90:<0.1610.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 
123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146], 'ns_1@10.242.238.88'}, #Ref<0.0.1.143589>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:34.259,ns_1@10.242.238.90:<0.1610.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1603.1> [ns_server:info,2014-08-19T16:52:34.259,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:34.279,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}] [ns_server:info,2014-08-19T16:52:34.280,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:34.281,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:34.281,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:34.281,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:34.282,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:34.282,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.282,ns_1@10.242.238.90:<0.1612.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.282,ns_1@10.242.238.90:<0.1612.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.282,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:34.282,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:34.282,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:34.282,ns_1@10.242.238.90:<0.1603.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:34.282,ns_1@10.242.238.90:<0.1610.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1603.1> [ns_server:debug,2014-08-19T16:52:34.283,ns_1@10.242.238.90:<0.1610.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:34.283,ns_1@10.242.238.90:<0.1614.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:34.283,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1603.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1605.1>, <<"cut off">>,<<"cut off">>,[],181,false,false,0, {1408,452754,281632}, completed, {<0.1610.1>,#Ref<0.0.1.143604>}, <<"replication_ns_1@10.242.238.90">>,<0.1603.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:34.283,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1610.1>,{#Ref<0.0.1.143591>,<0.1614.1>}} [error_logger:info,2014-08-19T16:52:34.283,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1614.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:34.285,ns_1@10.242.238.90:<0.32169.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:34.285,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32169.0> (ok) [rebalance:debug,2014-08-19T16:52:34.285,ns_1@10.242.238.90:<0.32164.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:34.285,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32164.0> (ok) [rebalance:debug,2014-08-19T16:52:34.287,ns_1@10.242.238.90:<0.32167.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.287,ns_1@10.242.238.90:<0.32167.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait 
for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.287,ns_1@10.242.238.90:<0.1615.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.287,ns_1@10.242.238.90:<0.1615.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.288,ns_1@10.242.238.90:<0.32167.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:34.288,ns_1@10.242.238.90:<0.32148.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.288,ns_1@10.242.238.90:<0.32148.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.288,ns_1@10.242.238.90:<0.1616.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.288,ns_1@10.242.238.90:<0.1616.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.288,ns_1@10.242.238.90:<0.32148.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.288,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.292,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.292,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3501 us [ns_server:debug,2014-08-19T16:52:34.293,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.293,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{146, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:34.293,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 147 state to replica [ns_server:info,2014-08-19T16:52:34.294,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147] ([147], []) [ns_server:debug,2014-08-19T16:52:34.295,ns_1@10.242.238.90:<0.1618.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 
134,135,136,137,138,139,140,141,142,143,144, 145,146,147], 'ns_1@10.242.238.88'}, #Ref<0.0.1.143768>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:34.295,ns_1@10.242.238.90:<0.1618.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1614.1> [ns_server:debug,2014-08-19T16:52:34.305,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:34.305,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1620.1> [ns_server:info,2014-08-19T16:52:34.306,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:34.326,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}] [ns_server:info,2014-08-19T16:52:34.327,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:34.332,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:34.332,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:52:34.332,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:34.332,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:34.333,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.333,ns_1@10.242.238.90:<0.1621.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.333,ns_1@10.242.238.90:<0.1621.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.333,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.333,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:34.333,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:34.333,ns_1@10.242.238.90:<0.1614.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:34.333,ns_1@10.242.238.90:<0.1618.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1614.1> [ns_server:debug,2014-08-19T16:52:34.334,ns_1@10.242.238.90:<0.1618.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:34.334,ns_1@10.242.238.90:<0.1623.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:34.334,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1614.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1620.1>, <<"cut off">>,<<"cut off">>,[],184,false,false,0, {1408,452754,332517}, completed, {<0.1618.1>,#Ref<0.0.1.143781>}, <<"replication_ns_1@10.242.238.90">>,<0.1614.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:34.334,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1618.1>,{#Ref<0.0.1.143770>,<0.1623.1>}} [error_logger:info,2014-08-19T16:52:34.335,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1623.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:34.340,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.344,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.344,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3416 us [ns_server:debug,2014-08-19T16:52:34.344,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.345,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{147, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:34.356,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:34.357,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1625.1> [ns_server:debug,2014-08-19T16:52:34.376,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.380,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:34.380,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3334 us [ns_server:debug,2014-08-19T16:52:34.380,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.380,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{4, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:34.385,ns_1@10.242.238.90:<0.32145.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:34.385,ns_1@10.242.238.90:<0.32140.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:34.385,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32145.0> (ok) [ns_server:debug,2014-08-19T16:52:34.385,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32140.0> (ok) [rebalance:debug,2014-08-19T16:52:34.387,ns_1@10.242.238.90:<0.32143.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.387,ns_1@10.242.238.90:<0.32143.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.387,ns_1@10.242.238.90:<0.1627.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.387,ns_1@10.242.238.90:<0.1627.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.387,ns_1@10.242.238.90:<0.32143.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:34.387,ns_1@10.242.238.90:<0.32138.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.387,ns_1@10.242.238.90:<0.32138.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.388,ns_1@10.242.238.90:<0.1628.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.388,ns_1@10.242.238.90:<0.1628.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.388,ns_1@10.242.238.90:<0.32138.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:34.423,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.426,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.426,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3773 us [ns_server:debug,2014-08-19T16:52:34.427,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.427,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{5, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:34.431,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 148 state to replica [ns_server:info,2014-08-19T16:52:34.432,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148] ([148], []) [ns_server:debug,2014-08-19T16:52:34.433,ns_1@10.242.238.90:<0.1630.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148], 'ns_1@10.242.238.88'}, #Ref<0.0.1.144039>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:34.433,ns_1@10.242.238.90:<0.1630.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1623.1> [ns_server:info,2014-08-19T16:52:34.434,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:34.457,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, 
{104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}] [ns_server:info,2014-08-19T16:52:34.457,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:34.458,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:34.458,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:34.458,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:34.458,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:34.458,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.458,ns_1@10.242.238.90:<0.1632.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.459,ns_1@10.242.238.90:<0.1632.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.459,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.459,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:34.459,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:34.459,ns_1@10.242.238.90:<0.1623.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:34.459,ns_1@10.242.238.90:<0.1630.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1623.1> [ns_server:debug,2014-08-19T16:52:34.459,ns_1@10.242.238.90:<0.1630.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:34.460,ns_1@10.242.238.90:<0.1634.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:34.460,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1623.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1625.1>, <<"cut off">>,<<"cut off">>,[],187,false,false,0, {1408,452754,458472}, completed, {<0.1630.1>,#Ref<0.0.1.144052>}, <<"replication_ns_1@10.242.238.90">>,<0.1623.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:34.460,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1630.1>,{#Ref<0.0.1.144041>,<0.1634.1>}} [error_logger:info,2014-08-19T16:52:34.460,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1634.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:34.465,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.468,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.468,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3047 us [ns_server:debug,2014-08-19T16:52:34.468,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.469,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{148, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:34.470,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 149 state to replica [ns_server:info,2014-08-19T16:52:34.470,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149] ([149], []) [ns_server:debug,2014-08-19T16:52:34.471,ns_1@10.242.238.90:<0.1636.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 
112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149], 'ns_1@10.242.238.88'}, #Ref<0.0.1.144174>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148, 149]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:34.472,ns_1@10.242.238.90:<0.1636.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1634.1> [ns_server:debug,2014-08-19T16:52:34.481,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:34.481,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1638.1> [ns_server:info,2014-08-19T16:52:34.482,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:34.502,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}] [ns_server:info,2014-08-19T16:52:34.503,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:34.503,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:34.504,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:52:34.504,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:34.504,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:34.504,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.504,ns_1@10.242.238.90:<0.1639.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.504,ns_1@10.242.238.90:<0.1639.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.504,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.504,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:34.504,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:34.504,ns_1@10.242.238.90:<0.1634.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:34.504,ns_1@10.242.238.90:<0.1636.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1634.1> [rebalance:debug,2014-08-19T16:52:34.505,ns_1@10.242.238.90:<0.32130.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:34.505,ns_1@10.242.238.90:<0.32135.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:34.505,ns_1@10.242.238.90:<0.1636.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:34.505,ns_1@10.242.238.90:<0.1641.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:34.505,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1634.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1638.1>, <<"cut off">>,<<"cut off">>,[],190,false,false,0, {1408,452754,503922}, completed, {<0.1636.1>,#Ref<0.0.1.144187>}, <<"replication_ns_1@10.242.238.90">>,<0.1634.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:34.506,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1636.1>,{#Ref<0.0.1.144176>,<0.1641.1>}} [ns_server:debug,2014-08-19T16:52:34.506,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32130.0> (ok) [ns_server:debug,2014-08-19T16:52:34.506,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32135.0> (ok) [error_logger:info,2014-08-19T16:52:34.506,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1641.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148, 149]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:34.507,ns_1@10.242.238.90:<0.32128.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:52:34.507,ns_1@10.242.238.90:<0.32133.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.508,ns_1@10.242.238.90:<0.32128.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.508,ns_1@10.242.238.90:<0.1642.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.508,ns_1@10.242.238.90:<0.32133.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.508,ns_1@10.242.238.90:<0.1643.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.508,ns_1@10.242.238.90:<0.1643.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [ns_server:debug,2014-08-19T16:52:34.508,ns_1@10.242.238.90:<0.1642.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.508,ns_1@10.242.238.90:<0.32133.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:info,2014-08-19T16:52:34.508,ns_1@10.242.238.90:<0.32128.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:34.513,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.518,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.518,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4957 us [ns_server:debug,2014-08-19T16:52:34.518,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.519,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{149, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:34.528,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:34.528,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1645.1> [ns_server:debug,2014-08-19T16:52:34.551,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.554,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.554,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3020 us [ns_server:debug,2014-08-19T16:52:34.554,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.555,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{7, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:34.595,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.598,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:34.599,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.599,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3415 us [ns_server:debug,2014-08-19T16:52:34.599,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{6, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:34.600,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 150 state to replica [ns_server:info,2014-08-19T16:52:34.600,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150] ([150], []) [ns_server:debug,2014-08-19T16:52:34.602,ns_1@10.242.238.90:<0.1648.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150], 'ns_1@10.242.238.88'}, #Ref<0.0.1.144441>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:34.602,ns_1@10.242.238.90:<0.1648.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1641.1> [ns_server:info,2014-08-19T16:52:34.602,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:34.623,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, 
{140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}] [ns_server:info,2014-08-19T16:52:34.625,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:34.625,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:34.625,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:34.625,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:34.626,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:34.626,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.626,ns_1@10.242.238.90:<0.1650.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.626,ns_1@10.242.238.90:<0.1650.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.626,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.626,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:34.626,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:34.626,ns_1@10.242.238.90:<0.1641.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:34.626,ns_1@10.242.238.90:<0.1648.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1641.1> [ns_server:debug,2014-08-19T16:52:34.627,ns_1@10.242.238.90:<0.1648.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:34.627,ns_1@10.242.238.90:<0.1652.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:34.627,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1641.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1645.1>, <<"cut off">>,<<"cut off">>,[],193,false,false,0, {1408,452754,625532}, completed, {<0.1648.1>,#Ref<0.0.1.144454>}, <<"replication_ns_1@10.242.238.90">>,<0.1641.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:34.627,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1648.1>,{#Ref<0.0.1.144443>,<0.1652.1>}} [error_logger:info,2014-08-19T16:52:34.627,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1652.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:34.630,ns_1@10.242.238.90:<0.32125.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:34.630,ns_1@10.242.238.90:<0.32120.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:34.630,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32125.0> (ok) [ns_server:debug,2014-08-19T16:52:34.630,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32120.0> (ok) [rebalance:debug,2014-08-19T16:52:34.631,ns_1@10.242.238.90:<0.32123.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.632,ns_1@10.242.238.90:<0.32123.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.632,ns_1@10.242.238.90:<0.1653.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.632,ns_1@10.242.238.90:<0.1653.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.632,ns_1@10.242.238.90:<0.32123.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:34.634,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [rebalance:debug,2014-08-19T16:52:34.634,ns_1@10.242.238.90:<0.32117.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.634,ns_1@10.242.238.90:<0.32117.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.634,ns_1@10.242.238.90:<0.1654.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.635,ns_1@10.242.238.90:<0.1654.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.635,ns_1@10.242.238.90:<0.32117.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.635,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.635,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1191 us [ns_server:debug,2014-08-19T16:52:34.636,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.636,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{150, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:34.637,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 151 state to replica [ns_server:info,2014-08-19T16:52:34.637,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151] ([151], []) [ns_server:debug,2014-08-19T16:52:34.638,ns_1@10.242.238.90:<0.1656.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151], 'ns_1@10.242.238.88'}, #Ref<0.0.1.144620>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 
135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:34.639,ns_1@10.242.238.90:<0.1656.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1652.1> [ns_server:debug,2014-08-19T16:52:34.650,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:34.650,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1658.1> [ns_server:info,2014-08-19T16:52:34.650,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:34.671,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}] [ns_server:info,2014-08-19T16:52:34.672,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:34.672,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:34.672,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:34.673,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:34.673,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:34.673,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.673,ns_1@10.242.238.90:<0.1659.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.673,ns_1@10.242.238.90:<0.1659.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.673,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:34.673,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:34.673,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:34.673,ns_1@10.242.238.90:<0.1652.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:34.673,ns_1@10.242.238.90:<0.1656.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1652.1> [ns_server:debug,2014-08-19T16:52:34.674,ns_1@10.242.238.90:<0.1656.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:34.674,ns_1@10.242.238.90:<0.1661.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:34.674,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1652.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1658.1>, <<"cut off">>,<<"cut off">>,[],196,false,false,0, {1408,452754,672741}, completed, {<0.1656.1>,#Ref<0.0.1.144633>}, <<"replication_ns_1@10.242.238.90">>,<0.1652.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:34.674,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1656.1>,{#Ref<0.0.1.144622>,<0.1661.1>}} [error_logger:info,2014-08-19T16:52:34.674,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1661.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:34.679,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.682,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.683,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3600 us [ns_server:debug,2014-08-19T16:52:34.683,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.684,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config 
change: buckets -> [{configs,[{"default", [{map,[{151, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:34.697,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:34.697,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1663.1> [ns_server:debug,2014-08-19T16:52:34.716,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.719,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.719,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3106 us [ns_server:debug,2014-08-19T16:52:34.719,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.720,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{9, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:52:34.722,ns_1@10.242.238.90:<0.32096.0>:janitor_agent:handle_call:795]Done [rebalance:debug,2014-08-19T16:52:34.722,ns_1@10.242.238.90:<0.32101.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:34.722,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32096.0> (ok) [ns_server:debug,2014-08-19T16:52:34.723,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32101.0> (ok) [rebalance:debug,2014-08-19T16:52:34.724,ns_1@10.242.238.90:<0.32094.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.725,ns_1@10.242.238.90:<0.32094.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.725,ns_1@10.242.238.90:<0.1665.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception 
[ns_server:debug,2014-08-19T16:52:34.725,ns_1@10.242.238.90:<0.1665.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.725,ns_1@10.242.238.90:<0.32094.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [rebalance:debug,2014-08-19T16:52:34.725,ns_1@10.242.238.90:<0.32099.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.725,ns_1@10.242.238.90:<0.32099.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.726,ns_1@10.242.238.90:<0.1666.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.726,ns_1@10.242.238.90:<0.1666.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.726,ns_1@10.242.238.90:<0.32099.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.762,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.768,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.768,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5314 us [ns_server:debug,2014-08-19T16:52:34.769,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.770,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{8, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:34.771,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 153 state to replica [ns_server:info,2014-08-19T16:52:34.771,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,153] ([153], []) [ns_server:debug,2014-08-19T16:52:34.772,ns_1@10.242.238.90:<0.1668.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,153], 'ns_1@10.242.238.88'}, #Ref<0.0.1.144915>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, 
{on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,153]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:34.772,ns_1@10.242.238.90:<0.1668.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1661.1> [ns_server:info,2014-08-19T16:52:34.773,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:34.793,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {153,1}] [ns_server:info,2014-08-19T16:52:34.794,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:34.794,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:34.794,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:34.794,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:34.794,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:34.794,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.794,ns_1@10.242.238.90:<0.1670.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.795,ns_1@10.242.238.90:<0.1670.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.795,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.795,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:34.795,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:34.795,ns_1@10.242.238.90:<0.1661.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:52:34.795,ns_1@10.242.238.90:<0.1668.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1661.1> [ns_server:debug,2014-08-19T16:52:34.795,ns_1@10.242.238.90:<0.1668.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:34.796,ns_1@10.242.238.90:<0.1672.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:34.796,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1661.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1663.1>, <<"cut off">>,<<"cut off">>,[],199,false,false,0, {1408,452754,794286}, completed, {<0.1668.1>,#Ref<0.0.1.144928>}, <<"replication_ns_1@10.242.238.90">>,<0.1661.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:34.796,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1668.1>,{#Ref<0.0.1.144917>,<0.1672.1>}} [error_logger:info,2014-08-19T16:52:34.796,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1672.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,153], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,153]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:34.801,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.804,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.804,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3344 us [ns_server:debug,2014-08-19T16:52:34.805,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.805,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{153, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, 
{servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:34.805,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 152 state to replica [ns_server:info,2014-08-19T16:52:34.806,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153] ([152], []) [ns_server:debug,2014-08-19T16:52:34.807,ns_1@10.242.238.90:<0.1674.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153], 'ns_1@10.242.238.88'}, #Ref<0.0.1.145042>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:34.807,ns_1@10.242.238.90:<0.1674.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1672.1> [ns_server:debug,2014-08-19T16:52:34.817,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,153]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:34.818,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1676.1> [ns_server:info,2014-08-19T16:52:34.818,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [rebalance:debug,2014-08-19T16:52:34.823,ns_1@10.242.238.90:<0.32091.0>:janitor_agent:handle_call:795]Done [ns_server:info,2014-08-19T16:52:34.838,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, 
{151,1}, {152,1}, {153,1}] [ns_server:info,2014-08-19T16:52:34.839,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:34.839,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:34.840,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:34.840,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:34.840,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:34.840,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.840,ns_1@10.242.238.90:<0.1677.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.840,ns_1@10.242.238.90:<0.1677.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.840,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:34.840,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:34.840,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:34.841,ns_1@10.242.238.90:<0.1672.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:34.841,ns_1@10.242.238.90:<0.1674.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1672.1> [ns_server:debug,2014-08-19T16:52:34.841,ns_1@10.242.238.90:<0.1674.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:34.841,ns_1@10.242.238.90:<0.1679.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:34.841,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1672.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1676.1>, <<"cut off">>,<<"cut off">>,[],202,false,false,0, {1408,452754,839875}, completed, {<0.1674.1>,#Ref<0.0.1.145055>}, <<"replication_ns_1@10.242.238.90">>,<0.1672.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:34.842,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1674.1>,{#Ref<0.0.1.145044>,<0.1679.1>}} [ns_server:debug,2014-08-19T16:52:34.842,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32091.0> (ok) [error_logger:info,2014-08-19T16:52:34.842,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1679.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [rebalance:debug,2014-08-19T16:52:34.843,ns_1@10.242.238.90:<0.32089.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.844,ns_1@10.242.238.90:<0.32089.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.844,ns_1@10.242.238.90:<0.1680.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.844,ns_1@10.242.238.90:<0.1680.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.844,ns_1@10.242.238.90:<0.32089.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:34.849,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.851,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.851,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1500 us [ns_server:debug,2014-08-19T16:52:34.851,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.852,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{152, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:34.863,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:34.863,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1682.1> [rebalance:debug,2014-08-19T16:52:34.865,ns_1@10.242.238.90:<0.32724.0>:janitor_agent:handle_call:795]Done [ns_server:debug,2014-08-19T16:52:34.865,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:854]Got done message from subprocess: <0.32724.0> (ok) [rebalance:debug,2014-08-19T16:52:34.866,ns_1@10.242.238.90:<0.32708.0>:ebucketmigrator_srv:terminate:737]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:52:34.867,ns_1@10.242.238.90:<0.32708.0>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.867,ns_1@10.242.238.90:<0.1683.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.867,ns_1@10.242.238.90:<0.1683.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.867,ns_1@10.242.238.90:<0.32708.0>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:34.882,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.885,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3061 us [ns_server:debug,2014-08-19T16:52:34.885,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.886,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.887,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{10, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:34.942,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.945,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.945,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3103 us [ns_server:debug,2014-08-19T16:52:34.946,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.946,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{11, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:34.947,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 154 state to replica [ns_server:info,2014-08-19T16:52:34.948,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154] ([154], []) [ns_server:debug,2014-08-19T16:52:34.949,ns_1@10.242.238.90:<0.1686.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 
123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154], 'ns_1@10.242.238.88'}, #Ref<0.0.1.145319>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:34.949,ns_1@10.242.238.90:<0.1686.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1679.1> [ns_server:info,2014-08-19T16:52:34.949,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:34.969,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}] [ns_server:info,2014-08-19T16:52:34.970,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:34.971,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:34.971,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:34.971,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:34.971,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:34.971,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:34.971,ns_1@10.242.238.90:<0.1688.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:34.971,ns_1@10.242.238.90:<0.1688.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:34.971,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:34.972,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:34.972,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:34.972,ns_1@10.242.238.90:<0.1679.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:34.972,ns_1@10.242.238.90:<0.1686.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1679.1> [ns_server:debug,2014-08-19T16:52:34.972,ns_1@10.242.238.90:<0.1686.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:34.972,ns_1@10.242.238.90:<0.1690.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:34.972,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1679.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1682.1>, <<"cut off">>,<<"cut off">>,[],205,false,false,0, {1408,452754,971010}, completed, {<0.1686.1>,#Ref<0.0.1.145332>}, <<"replication_ns_1@10.242.238.90">>,<0.1679.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:34.973,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1686.1>,{#Ref<0.0.1.145321>,<0.1690.1>}} [error_logger:info,2014-08-19T16:52:34.973,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1690.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:34.977,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:34.980,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:34.981,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3314 us [ns_server:debug,2014-08-19T16:52:34.981,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:34.981,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{154, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:34.982,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 155 state to replica [ns_server:info,2014-08-19T16:52:34.982,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155] ([155], []) [ns_server:debug,2014-08-19T16:52:34.984,ns_1@10.242.238.90:<0.1692.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155], 'ns_1@10.242.238.88'}, #Ref<0.0.1.145446>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:34.984,ns_1@10.242.238.90:<0.1692.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1690.1> [ns_server:debug,2014-08-19T16:52:34.995,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:34.995,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1694.1> [ns_server:info,2014-08-19T16:52:34.996,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:35.016,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, 
{102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}] [ns_server:info,2014-08-19T16:52:35.017,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:35.017,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:35.017,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:35.018,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:35.018,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:35.018,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:35.018,ns_1@10.242.238.90:<0.1695.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:35.018,ns_1@10.242.238.90:<0.1695.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:35.018,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:35.018,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:35.018,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:35.018,ns_1@10.242.238.90:<0.1690.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:35.018,ns_1@10.242.238.90:<0.1692.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1690.1> [ns_server:debug,2014-08-19T16:52:35.019,ns_1@10.242.238.90:<0.1692.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:35.019,ns_1@10.242.238.90:<0.1697.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:35.019,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1690.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1694.1>, <<"cut off">>,<<"cut off">>,[],208,false,false,0, {1408,452755,17811}, completed, {<0.1692.1>,#Ref<0.0.1.145459>}, <<"replication_ns_1@10.242.238.90">>,<0.1690.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:35.019,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1692.1>,{#Ref<0.0.1.145448>,<0.1697.1>}} [error_logger:info,2014-08-19T16:52:35.019,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1697.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:35.026,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.028,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.028,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1990 us [ns_server:debug,2014-08-19T16:52:35.029,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.029,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{155, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:35.042,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:35.042,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1699.1> [ns_server:debug,2014-08-19T16:52:35.088,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:52:35.091,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.091,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3081 us [ns_server:debug,2014-08-19T16:52:35.092,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.092,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{12, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:35.135,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.138,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2614 us [ns_server:debug,2014-08-19T16:52:35.138,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.138,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.139,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{13, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:35.140,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 157 state to replica [ns_server:info,2014-08-19T16:52:35.141,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,157] ([157], []) [ns_server:debug,2014-08-19T16:52:35.143,ns_1@10.242.238.90:<0.1701.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 157], 'ns_1@10.242.238.88'}, 
#Ref<0.0.1.145666>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,157]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:35.143,ns_1@10.242.238.90:<0.1701.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1697.1> [ns_server:info,2014-08-19T16:52:35.144,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:35.170,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {157,1}] [ns_server:info,2014-08-19T16:52:35.171,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:35.171,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:35.171,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:35.171,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:35.172,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:35.172,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:35.172,ns_1@10.242.238.90:<0.1703.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:35.172,ns_1@10.242.238.90:<0.1703.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:35.172,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
[ns_server:debug,2014-08-19T16:52:35.172,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:35.172,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:35.172,ns_1@10.242.238.90:<0.1697.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:35.172,ns_1@10.242.238.90:<0.1701.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1697.1> [ns_server:debug,2014-08-19T16:52:35.173,ns_1@10.242.238.90:<0.1701.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:35.173,ns_1@10.242.238.90:<0.1705.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:35.173,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1697.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1699.1>, <<"cut off">>,<<"cut off">>,[],211,false,false,0, {1408,452755,171704}, completed, {<0.1701.1>,#Ref<0.0.1.145679>}, <<"replication_ns_1@10.242.238.90">>,<0.1697.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:35.173,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1701.1>,{#Ref<0.0.1.145668>,<0.1705.1>}} [error_logger:info,2014-08-19T16:52:35.173,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1705.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 157], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,157]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:35.179,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.182,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.182,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3112 us [ns_server:debug,2014-08-19T16:52:35.182,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:35.183,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{157, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:35.185,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 156 state to replica [ns_server:info,2014-08-19T16:52:35.186,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157] ([156], []) [ns_server:debug,2014-08-19T16:52:35.187,ns_1@10.242.238.90:<0.1707.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157], 'ns_1@10.242.238.88'}, #Ref<0.0.1.145798>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:35.187,ns_1@10.242.238.90:<0.1707.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1705.1> [ns_server:debug,2014-08-19T16:52:35.196,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,157]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:35.196,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1709.1> [ns_server:info,2014-08-19T16:52:35.197,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:35.217,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, 
{99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}] [ns_server:info,2014-08-19T16:52:35.218,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:35.223,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:35.223,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:35.223,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:35.223,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:35.224,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:35.224,ns_1@10.242.238.90:<0.1711.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:35.224,ns_1@10.242.238.90:<0.1711.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:35.224,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:35.224,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:35.224,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:35.224,ns_1@10.242.238.90:<0.1705.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:35.224,ns_1@10.242.238.90:<0.1707.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1705.1> [ns_server:debug,2014-08-19T16:52:35.225,ns_1@10.242.238.90:<0.1707.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:35.225,ns_1@10.242.238.90:<0.1713.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:35.225,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1705.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1709.1>, <<"cut off">>,<<"cut off">>,[],214,false,false,0, {1408,452755,223512}, completed, {<0.1707.1>,#Ref<0.0.1.145811>}, <<"replication_ns_1@10.242.238.90">>,<0.1705.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:35.225,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1707.1>,{#Ref<0.0.1.145800>,<0.1713.1>}} [error_logger:info,2014-08-19T16:52:35.225,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1713.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:35.231,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.235,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.236,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{156, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:35.237,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5701 us [ns_server:debug,2014-08-19T16:52:35.238,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:52:35.238,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 158 state to replica [ns_server:info,2014-08-19T16:52:35.239,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157,158] ([158], []) [ns_server:debug,2014-08-19T16:52:35.240,ns_1@10.242.238.90:<0.1714.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, 
[86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158], 'ns_1@10.242.238.88'}, #Ref<0.0.1.145940>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:35.241,ns_1@10.242.238.90:<0.1714.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1713.1> [ns_server:debug,2014-08-19T16:52:35.250,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:35.250,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1717.1> [ns_server:info,2014-08-19T16:52:35.250,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:35.271,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}] [ns_server:info,2014-08-19T16:52:35.272,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:35.273,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:35.273,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:52:35.273,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:35.273,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:35.273,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:35.273,ns_1@10.242.238.90:<0.1718.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:35.273,ns_1@10.242.238.90:<0.1718.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:35.273,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:35.274,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:35.274,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:35.274,ns_1@10.242.238.90:<0.1713.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:35.274,ns_1@10.242.238.90:<0.1714.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1713.1> [ns_server:debug,2014-08-19T16:52:35.274,ns_1@10.242.238.90:<0.1714.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:35.275,ns_1@10.242.238.90:<0.1720.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:35.275,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1713.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1717.1>, <<"cut off">>,<<"cut off">>,[],217,false,false,0, {1408,452755,273028}, completed, {<0.1714.1>,#Ref<0.0.1.145954>}, <<"replication_ns_1@10.242.238.90">>,<0.1713.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:35.275,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1714.1>,{#Ref<0.0.1.145942>,<0.1720.1>}} [error_logger:info,2014-08-19T16:52:35.275,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1720.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:35.280,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.282,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.283,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2688 us [ns_server:debug,2014-08-19T16:52:35.283,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.284,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{158, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:35.285,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 159 state to replica [ns_server:info,2014-08-19T16:52:35.286,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157,158,159] ([159], []) [ns_server:debug,2014-08-19T16:52:35.287,ns_1@10.242.238.90:<0.1722.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, 
[86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159], 'ns_1@10.242.238.88'}, #Ref<0.0.1.146080>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:35.287,ns_1@10.242.238.90:<0.1722.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1720.1> [ns_server:debug,2014-08-19T16:52:35.301,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:35.301,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1724.1> [ns_server:info,2014-08-19T16:52:35.302,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:35.322,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}, {159,1}] [ns_server:info,2014-08-19T16:52:35.323,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:35.324,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:35.324,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
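Each tap_replication_manager record above reports the full new replication list together with the delta in parentheses -- here ([159], []), meaning vbucket 159 was added and nothing removed. Assuming the lists are kept sorted, as they appear in the log, the new filter is just a set union followed by a subtraction; a minimal illustrative helper (not part of the ns_server code):

%% NewFilter = (Old union Added) minus Removed, with all three kept as sorted lists.
new_filter(Old, Added, Removed) ->
    ordsets:subtract(ordsets:union(Old, Added), Removed).

%% e.g. new_filter([86,88,158], [159], []) returns [86,88,158,159]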
[ns_server:debug,2014-08-19T16:52:35.324,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:35.324,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:35.324,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:35.324,ns_1@10.242.238.90:<0.1725.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:35.324,ns_1@10.242.238.90:<0.1725.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:35.324,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:35.324,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:35.325,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:35.325,ns_1@10.242.238.90:<0.1720.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:35.325,ns_1@10.242.238.90:<0.1722.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1720.1> [ns_server:debug,2014-08-19T16:52:35.325,ns_1@10.242.238.90:<0.1722.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:35.325,ns_1@10.242.238.90:<0.1727.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:35.325,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1720.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1724.1>, <<"cut off">>,<<"cut off">>,[],220,false,false,0, {1408,452755,324088}, completed, {<0.1722.1>,#Ref<0.0.1.146093>}, <<"replication_ns_1@10.242.238.90">>,<0.1720.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:35.326,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1722.1>,{#Ref<0.0.1.146082>,<0.1727.1>}} [error_logger:info,2014-08-19T16:52:35.326,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1727.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158, 159]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:35.331,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.339,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.339,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7335 us [ns_server:debug,2014-08-19T16:52:35.339,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.340,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{159, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:35.348,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:35.348,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1729.1> [ns_server:debug,2014-08-19T16:52:35.371,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
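The "config change: buckets -> ..." records above carry the whole bucket proplist; the part that actually changes on each step is the map entry, a {VBucket, OldChain, NewChain} tuple such as {159, ['ns_1@10.242.238.88', undefined], ['ns_1@10.242.238.88', 'ns_1@10.242.238.90']}. A small helper showing how such a term could be unpacked -- the shape is taken from the log itself, and this is not an ns_config API:

%% BucketsVal is the value logged after "buckets ->", e.g. [{configs,[{"default",Props}]}].
map_changes(BucketsVal) ->
    {configs, Configs} = lists:keyfind(configs, 1, BucketsVal),
    {"default", Props} = lists:keyfind("default", 1, Configs),
    {map, Entries}     = lists:keyfind(map, 1, Props),
    %% each entry is {VBucket, OldChain, NewChain}
    [{VB, OldChain, NewChain} || {VB, OldChain, NewChain} <- Entries].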
[ns_server:debug,2014-08-19T16:52:35.373,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.373,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1097 us [ns_server:debug,2014-08-19T16:52:35.373,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.374,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{15, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:35.415,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.418,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.418,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3151 us [ns_server:debug,2014-08-19T16:52:35.418,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.419,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{14, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:35.459,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.462,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.463,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3428 us [ns_server:debug,2014-08-19T16:52:35.463,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.463,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{17, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:35.504,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.508,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.508,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3497 us [ns_server:debug,2014-08-19T16:52:35.508,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.509,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{16, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:35.511,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 160 state to replica [ns_server:info,2014-08-19T16:52:35.513,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157,158,159,160] (" ", []) [ns_server:debug,2014-08-19T16:52:35.514,ns_1@10.242.238.90:<0.1740.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160], 'ns_1@10.242.238.88'}, #Ref<0.0.1.146341>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159,160]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:35.548,ns_1@10.242.238.90:<0.1740.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1727.1> [ns_server:info,2014-08-19T16:52:35.549,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream 
`replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:35.570,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}, {159,1}, {160,1}] [ns_server:info,2014-08-19T16:52:35.571,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:35.571,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:35.571,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:35.571,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:35.571,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:35.572,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:35.572,ns_1@10.242.238.90:<0.1742.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:35.572,ns_1@10.242.238.90:<0.1742.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:35.572,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:35.572,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:35.572,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:35.572,ns_1@10.242.238.90:<0.1727.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:35.572,ns_1@10.242.238.90:<0.1740.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1727.1> [ns_server:debug,2014-08-19T16:52:35.573,ns_1@10.242.238.90:<0.1740.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:35.573,ns_1@10.242.238.90:<0.1744.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:35.573,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1727.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1729.1>, <<"cut off">>,<<"cut off">>,[],223,false,false,0, {1408,452755,571423}, completed, {<0.1740.1>,#Ref<0.0.1.146365>}, <<"replication_ns_1@10.242.238.90">>,<0.1727.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:35.573,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1740.1>,{#Ref<0.0.1.146343>,<0.1744.1>}} [error_logger:info,2014-08-19T16:52:35.573,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1744.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159, 160]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:35.578,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.581,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.581,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3045 us [ns_server:debug,2014-08-19T16:52:35.581,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.582,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{160, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:35.583,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 161 state to replica [ns_server:info,2014-08-19T16:52:35.583,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 
'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157,158,159,160,161] ("¡", []) [ns_server:debug,2014-08-19T16:52:35.584,ns_1@10.242.238.90:<0.1746.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161], 'ns_1@10.242.238.88'}, #Ref<0.0.1.146484>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159,160,161]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:35.584,ns_1@10.242.238.90:<0.1746.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1744.1> [ns_server:debug,2014-08-19T16:52:35.596,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:35.596,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1748.1> [ns_server:info,2014-08-19T16:52:35.596,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:35.616,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}, {159,1}, {160,1}, {161,1}] [ns_server:info,2014-08-19T16:52:35.617,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
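The deltas logged by tap_replication_manager start to look odd from vbucket 160 onward: (" ", []) and ("¡", []) are not corruption, they are the added-vbucket lists [160] and [161]. Erlang's ~p formatting prints a list of integers as a string whenever every element is a printable Latin-1 character; 160 (a non-breaking space, hence the apparently blank string) through 255 fall in that range, while 159 does not. A quick shell check -- the exact rendering can vary with the OTP release and the +pc setting:

1> "¡" =:= [161].                  % code point of ¡ is 161, so [161] prints as "¡"
true
2> "¤" =:= [164].                  % likewise [164] prints as "¤"
true
3> io_lib:printable_list([159]).   % 159 is not printable Latin-1, so it stays as [159]
false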
[ns_server:info,2014-08-19T16:52:35.617,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:35.617,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:35.618,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:35.618,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:35.618,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:35.618,ns_1@10.242.238.90:<0.1749.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:35.618,ns_1@10.242.238.90:<0.1749.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:35.618,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:35.618,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:35.618,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:35.618,ns_1@10.242.238.90:<0.1744.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:35.618,ns_1@10.242.238.90:<0.1746.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1744.1> [ns_server:debug,2014-08-19T16:52:35.619,ns_1@10.242.238.90:<0.1746.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:35.619,ns_1@10.242.238.90:<0.1751.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:35.619,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1744.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1748.1>, <<"cut off">>,<<"cut off">>,[],226,false,false,0, {1408,452755,617812}, completed, {<0.1746.1>,#Ref<0.0.1.146497>}, <<"replication_ns_1@10.242.238.90">>,<0.1744.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:35.619,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1746.1>,{#Ref<0.0.1.146486>,<0.1751.1>}} [error_logger:info,2014-08-19T16:52:35.619,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1751.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159, 160,161]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:35.624,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.629,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.629,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4435 us [ns_server:debug,2014-08-19T16:52:35.629,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.630,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{161, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:35.633,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 162 state to replica [ns_server:info,2014-08-19T16:52:35.634,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162] ("¢", []) [ns_server:debug,2014-08-19T16:52:35.635,ns_1@10.242.238.90:<0.1753.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under 
id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162], 'ns_1@10.242.238.88'}, #Ref<0.0.1.146625>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159,160,161,162]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:35.635,ns_1@10.242.238.90:<0.1753.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1751.1> [ns_server:debug,2014-08-19T16:52:35.642,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:35.643,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1755.1> [ns_server:info,2014-08-19T16:52:35.643,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:35.663,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}, {159,1}, {160,1}, {161,1}, {162,1}] [ns_server:info,2014-08-19T16:52:35.664,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:35.665,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:35.665,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:52:35.665,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:35.665,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:35.665,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:35.665,ns_1@10.242.238.90:<0.1756.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:35.665,ns_1@10.242.238.90:<0.1756.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:35.665,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:35.665,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:35.666,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:35.666,ns_1@10.242.238.90:<0.1751.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:35.666,ns_1@10.242.238.90:<0.1753.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1751.1> [ns_server:debug,2014-08-19T16:52:35.666,ns_1@10.242.238.90:<0.1753.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:35.666,ns_1@10.242.238.90:<0.1758.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:35.666,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1751.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1755.1>, <<"cut off">>,<<"cut off">>,[],229,false,false,0, {1408,452755,665026}, completed, {<0.1753.1>,#Ref<0.0.1.146641>}, <<"replication_ns_1@10.242.238.90">>,<0.1751.1>, {had_backfill,false,undefined,[]}, completed,false}. 
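Each cycle ends with a PROGRESS REPORT from the 'ns_vbm_new_sup-default' supervisor like the ones above: the migrator is started as a temporary worker keyed by {new_child_id, VBuckets, SrcNode} with a 60000 ms shutdown. In the classic OTP 6-tuple child-spec form, that report corresponds roughly to the following -- inferred from the report fields, not copied from the ns_vbm_new_sup source:

%% {Id, StartMFA, Restart, Shutdown, Type, Modules}
child_spec(Src, Dst, VBuckets, SrcNode, Opts) ->
    {{new_child_id, VBuckets, SrcNode},                       % name seen in the progress report
     {ebucketmigrator_srv, start_link, [Src, Dst, Opts]},     % {mfargs,...}
     temporary,                                               % {restart_type,temporary}
     60000,                                                   % {shutdown,60000}
     worker,                                                  % {child_type,worker}
     [ebucketmigrator_srv]}.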
[ns_server:debug,2014-08-19T16:52:35.667,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1753.1>,{#Ref<0.0.1.146627>,<0.1758.1>}} [error_logger:info,2014-08-19T16:52:35.667,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1758.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159, 160,161,162]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:35.671,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.674,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.675,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3446 us [ns_server:debug,2014-08-19T16:52:35.675,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.676,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{162, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:35.677,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 163 state to replica [ns_server:info,2014-08-19T16:52:35.677,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163] ("£", []) [ns_server:debug,2014-08-19T16:52:35.678,ns_1@10.242.238.90:<0.1760.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself 
under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163], 'ns_1@10.242.238.88'}, #Ref<0.0.1.146765>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159,160,161,162,163]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:35.678,ns_1@10.242.238.90:<0.1760.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1758.1> [ns_server:debug,2014-08-19T16:52:35.688,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161,162]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:35.688,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1762.1> [ns_server:info,2014-08-19T16:52:35.689,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:35.709,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}, {159,1}, {160,1}, {161,1}, {162,1}, {163,1}] [ns_server:info,2014-08-19T16:52:35.710,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:35.711,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:35.711,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. 
[ns_server:debug,2014-08-19T16:52:35.711,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:35.711,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:35.711,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:35.711,ns_1@10.242.238.90:<0.1763.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:35.711,ns_1@10.242.238.90:<0.1763.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:35.711,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:35.711,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:35.712,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:35.712,ns_1@10.242.238.90:<0.1758.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:35.712,ns_1@10.242.238.90:<0.1760.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1758.1> [ns_server:debug,2014-08-19T16:52:35.712,ns_1@10.242.238.90:<0.1760.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:35.712,ns_1@10.242.238.90:<0.1765.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:35.712,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1758.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1762.1>, <<"cut off">>,<<"cut off">>,[],232,false,false,0, {1408,452755,711086}, completed, {<0.1760.1>,#Ref<0.0.1.146778>}, <<"replication_ns_1@10.242.238.90">>,<0.1758.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:35.713,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1760.1>,{#Ref<0.0.1.146767>,<0.1765.1>}} [error_logger:info,2014-08-19T16:52:35.713,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1765.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159, 160,161,162,163]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:35.718,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.721,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.721,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3291 us [ns_server:debug,2014-08-19T16:52:35.721,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.722,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{163, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:35.736,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161,162,163]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:35.737,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1767.1> [ns_server:debug,2014-08-19T16:52:35.756,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 
'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.759,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.759,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2981 us [ns_server:debug,2014-08-19T16:52:35.759,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.760,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{19, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:35.799,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.802,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.803,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3220 us [ns_server:debug,2014-08-19T16:52:35.803,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{18, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:35.804,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.843,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.846,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.847,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1387 us [ns_server:debug,2014-08-19T16:52:35.847,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.847,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{21, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, 
{flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:35.891,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.893,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2099 us [ns_server:debug,2014-08-19T16:52:35.893,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.893,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.894,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{20, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:35.899,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 164 state to replica [ns_server:info,2014-08-19T16:52:35.899,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164] ("¤", []) [ns_server:debug,2014-08-19T16:52:35.901,ns_1@10.242.238.90:<0.1772.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164], 'ns_1@10.242.238.88'}, #Ref<0.0.1.147028>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159,160,161,162,163, 164]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:35.902,ns_1@10.242.238.90:<0.1772.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1765.1> [ns_server:info,2014-08-19T16:52:35.902,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:handle_call:270]Starting 
new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:35.922,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}, {159,1}, {160,1}, {161,1}, {162,1}, {163,1}, {164,1}] [ns_server:info,2014-08-19T16:52:35.928,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:35.928,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:35.928,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:35.928,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:35.928,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:35.929,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:35.929,ns_1@10.242.238.90:<0.1774.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:35.929,ns_1@10.242.238.90:<0.1774.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:35.929,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:35.929,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:35.929,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:35.929,ns_1@10.242.238.90:<0.1765.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:35.929,ns_1@10.242.238.90:<0.1772.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1765.1> [ns_server:debug,2014-08-19T16:52:35.930,ns_1@10.242.238.90:<0.1772.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:35.930,ns_1@10.242.238.90:<0.1776.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:35.930,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1765.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1767.1>, <<"cut off">>,<<"cut off">>,[],235,false,false,0, {1408,452755,928494}, completed, {<0.1772.1>,#Ref<0.0.1.147042>}, <<"replication_ns_1@10.242.238.90">>,<0.1765.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:35.930,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1772.1>,{#Ref<0.0.1.147030>,<0.1776.1>}} [error_logger:info,2014-08-19T16:52:35.930,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1776.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159, 160,161,162,163,164]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:35.935,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.938,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.939,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3666 us [ns_server:debug,2014-08-19T16:52:35.939,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.939,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{164, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:35.940,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 165 state to replica [ns_server:info,2014-08-19T16:52:35.940,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to 
change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165] ("¥", []) [ns_server:debug,2014-08-19T16:52:35.941,ns_1@10.242.238.90:<0.1778.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165], 'ns_1@10.242.238.88'}, #Ref<0.0.1.147156>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159,160,161,162,163,164, 165]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:35.942,ns_1@10.242.238.90:<0.1778.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1776.1> [ns_server:debug,2014-08-19T16:52:35.953,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161,162,163,164]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:35.953,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1780.1> [ns_server:info,2014-08-19T16:52:35.954,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:35.974,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}, {159,1}, {160,1}, {161,1}, {162,1}, {163,1}, {164,1}, {165,1}] [ns_server:info,2014-08-19T16:52:35.975,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. 
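The filter lists logged above pair each replicated vbucket with what looks like a starting checkpoint id of 1, and each cycle simply extends the previous vbucket set by the vbucket that just became a replica (165 in this round). A minimal sketch of that derivation, assuming the {VB, 1} reading is right; module and function names are invented, not ns_server code:

%% Illustrative only: build the expanded TAP filter from the previous
%% vbucket set plus the newly-replicated vbucket.
-module(filter_sketch).
-export([expand_filter/2]).

expand_filter(OldVBuckets, NewVB) ->
    VBuckets = lists:usort([NewVB | OldVBuckets]),
    [{VB, 1} || VB <- VBuckets].

Called with the previous list [86,88,...,164] and 165, this yields the [{86,1},{88,1},...,{165,1}] filter shown above.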
[ns_server:info,2014-08-19T16:52:35.976,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:35.976,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:35.976,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:35.976,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:35.976,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:35.976,ns_1@10.242.238.90:<0.1781.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:35.976,ns_1@10.242.238.90:<0.1781.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:35.977,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:35.977,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:35.977,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:35.977,ns_1@10.242.238.90:<0.1776.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:35.977,ns_1@10.242.238.90:<0.1778.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1776.1> [ns_server:debug,2014-08-19T16:52:35.977,ns_1@10.242.238.90:<0.1778.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:35.978,ns_1@10.242.238.90:<0.1783.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:35.978,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1776.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1780.1>, <<"cut off">>,<<"cut off">>,[],238,false,false,0, {1408,452755,976125}, completed, {<0.1778.1>,#Ref<0.0.1.147169>}, <<"replication_ns_1@10.242.238.90">>,<0.1776.1>, {had_backfill,false,undefined,[]}, completed,false}. 
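The sequence above (the old migrator passes its state to the caller and prepares to die, the registered filter-change process forwards it, and the replacement picks it up in init) is an ordinary Erlang process hand-off. A toy reconstruction of that pattern under that reading, not the real ebucketmigrator_srv implementation:

-module(handoff_sketch).
-export([demo/0]).

%% The demo process stands in for the new migrator that receives the
%% inherited state.
demo() ->
    Old = spawn(fun() -> old_migrator(#{upstream => inherited_connection}) end),
    Changer = spawn(fun() -> filter_change(Old) end),
    Changer ! {start_new, self()},
    receive
        {new_migrator_ready, State} -> State
    end.

old_migrator(State) ->
    receive
        {get_old_state, From} ->
            From ! {old_state, State}          %% "Passed old state to caller"
    end.                                       %% then "Preparing to die"

filter_change(Old) ->
    receive
        {start_new, NewMigrator} ->
            Old ! {get_old_state, self()},
            receive
                {old_state, State} ->          %% "Got old state from previous ebucketmigrator"
                    NewMigrator ! {new_migrator_ready, State}  %% "Sent old state to new instance"
            end
    end.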
[ns_server:debug,2014-08-19T16:52:35.978,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1778.1>,{#Ref<0.0.1.147158>,<0.1783.1>}} [error_logger:info,2014-08-19T16:52:35.978,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1783.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159, 160,161,162,163,164,165]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:35.983,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:35.986,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.987,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1541 us [ns_server:debug,2014-08-19T16:52:35.987,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:35.988,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{165, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.002,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161,162,163,164,165]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:36.002,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1785.1> [ns_server:debug,2014-08-19T16:52:36.021,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization 
request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.024,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.024,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3318 us [ns_server:debug,2014-08-19T16:52:36.025,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.026,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{22, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.067,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.069,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.069,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1400 us [ns_server:debug,2014-08-19T16:52:36.069,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.070,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{23, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:36.073,ns_1@10.242.238.90:<0.18785.0>:ns_memcached:do_handle_call:527]Changed vbucket 166 state to replica [ns_server:info,2014-08-19T16:52:36.074,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165, 166] ("¦", []) [ns_server:debug,2014-08-19T16:52:36.075,ns_1@10.242.238.90:<0.1788.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 
145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166], 'ns_1@10.242.238.88'}, #Ref<0.0.1.147360>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159,160,161,162,163,164, 165,166]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:36.076,ns_1@10.242.238.90:<0.1788.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1783.1> [ns_server:info,2014-08-19T16:52:36.076,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:36.108,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}, {159,1}, {160,1}, {161,1}, {162,1}, {163,1}, {164,1}, {165,1}, {166,1}] [ns_server:info,2014-08-19T16:52:36.109,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:36.109,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:36.110,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:36.110,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:36.110,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:36.110,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:36.110,ns_1@10.242.238.90:<0.1790.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:36.110,ns_1@10.242.238.90:<0.1790.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:36.110,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! 
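The "opaque message" lines above describe a flush barrier: a recognizable no-op is pushed after all pending downstream traffic, and its ack proves everything sent earlier arrived. A sketch of the same idea using plain Erlang messages rather than the memcached binary protocol the real code speaks; names are invented:

-module(opaque_sketch).
-export([confirm_downstream/1]).

%% Send a unique marker after all pending traffic and wait for its ack,
%% which implies everything queued before it was received in order.
confirm_downstream(Downstream) ->
    Marker = make_ref(),
    Downstream ! {opaque, self(), Marker},
    receive
        {opaque_ack, Marker} -> ok
    after 30000 ->
            {error, timeout}
    end.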
[ns_server:debug,2014-08-19T16:52:36.110,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:36.111,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:36.111,ns_1@10.242.238.90:<0.1783.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:36.111,ns_1@10.242.238.90:<0.1788.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1783.1> [ns_server:debug,2014-08-19T16:52:36.111,ns_1@10.242.238.90:<0.1788.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:36.111,ns_1@10.242.238.90:<0.1792.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:36.111,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1783.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1785.1>, <<"cut off">>,<<"cut off">>,[],241,false,false,0, {1408,452756,109938}, completed, {<0.1788.1>,#Ref<0.0.1.147374>}, <<"replication_ns_1@10.242.238.90">>,<0.1783.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:36.112,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1788.1>,{#Ref<0.0.1.147362>,<0.1792.1>}} [error_logger:info,2014-08-19T16:52:36.112,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1792.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159, 160,161,162,163,164,165,166]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:36.117,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.120,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.120,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3127 us [ns_server:debug,2014-08-19T16:52:36.121,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
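Each "config change: buckets" entry above lists only the vbuckets whose chain changed, apparently as {VBucket, OldChain, NewChain} triples (e.g. 165 going from ['ns_1@10.242.238.88',undefined] to ['ns_1@10.242.238.88','ns_1@10.242.238.90'], i.e. gaining this node as its replica). A toy helper that applies such a diff to a full map, under that assumption; not ns_server code:

-module(map_diff_sketch).
-export([apply_diff/2]).

%% Map is assumed to be the full list of chains, with vbucket 0's chain
%% at position 1.
apply_diff(Map, Diff) ->
    lists:foldl(fun({VB, _OldChain, NewChain}, Acc) ->
                        setnth(VB + 1, Acc, NewChain)
                end, Map, Diff).

setnth(1, [_ | Rest], New) -> [New | Rest];
setnth(N, [H | Rest], New) when N > 1 -> [H | setnth(N - 1, Rest, New)].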
[ns_server:debug,2014-08-19T16:52:36.121,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{166, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:36.122,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 167 state to replica [ns_server:info,2014-08-19T16:52:36.122,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165, 166,167] ("§", []) [ns_server:debug,2014-08-19T16:52:36.123,ns_1@10.242.238.90:<0.1794.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166, 167], 'ns_1@10.242.238.88'}, #Ref<0.0.1.147502>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159,160,161,162,163,164, 165,166,167]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:36.123,ns_1@10.242.238.90:<0.1794.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1792.1> [ns_server:debug,2014-08-19T16:52:36.133,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161,162,163,164,165,166]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:36.134,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1796.1> [ns_server:info,2014-08-19T16:52:36.134,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` 
[ns_server:info,2014-08-19T16:52:36.154,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}, {159,1}, {160,1}, {161,1}, {162,1}, {163,1}, {164,1}, {165,1}, {166,1}, {167,1}] [ns_server:info,2014-08-19T16:52:36.155,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:36.155,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:36.156,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:36.156,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:36.156,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:36.156,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:36.156,ns_1@10.242.238.90:<0.1797.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:36.156,ns_1@10.242.238.90:<0.1797.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:36.156,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:36.156,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:36.156,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:36.157,ns_1@10.242.238.90:<0.1792.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:36.157,ns_1@10.242.238.90:<0.1794.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1792.1> [ns_server:debug,2014-08-19T16:52:36.157,ns_1@10.242.238.90:<0.1794.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:36.157,ns_1@10.242.238.90:<0.1799.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. 
Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:36.157,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1792.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1796.1>, <<"cut off">>,<<"cut off">>,[],244,false,false,0, {1408,452756,155977}, completed, {<0.1794.1>,#Ref<0.0.1.147515>}, <<"replication_ns_1@10.242.238.90">>,<0.1792.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:36.157,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1794.1>,{#Ref<0.0.1.147504>,<0.1799.1>}} [error_logger:info,2014-08-19T16:52:36.157,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1799.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166, 167], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159, 160,161,162,163,164,165,166,167]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:36.162,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.166,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.166,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3899 us [ns_server:debug,2014-08-19T16:52:36.166,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.167,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{167, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:36.170,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 169 state to replica 
[ns_server:info,2014-08-19T16:52:36.170,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165, 166,167,169] ("©", []) [ns_server:debug,2014-08-19T16:52:36.172,ns_1@10.242.238.90:<0.1801.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166, 167,169], 'ns_1@10.242.238.88'}, #Ref<0.0.1.147642>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159,160,161,162,163,164, 165,166,167,169]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:36.173,ns_1@10.242.238.90:<0.1801.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1799.1> [ns_server:debug,2014-08-19T16:52:36.183,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161,162,163,164,165,166,167]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:36.184,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1803.1> [ns_server:info,2014-08-19T16:52:36.184,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:36.204,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}, {159,1}, {160,1}, {161,1}, {162,1}, {163,1}, {164,1}, {165,1}, 
{166,1}, {167,1}, {169,1}] [ns_server:info,2014-08-19T16:52:36.205,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:36.206,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:36.206,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:36.206,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:36.206,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:36.206,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:36.206,ns_1@10.242.238.90:<0.1804.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:36.206,ns_1@10.242.238.90:<0.1804.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:36.207,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:36.207,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:36.207,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:36.207,ns_1@10.242.238.90:<0.1799.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:36.207,ns_1@10.242.238.90:<0.1801.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1799.1> [ns_server:debug,2014-08-19T16:52:36.207,ns_1@10.242.238.90:<0.1801.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:36.207,ns_1@10.242.238.90:<0.1806.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:36.207,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1799.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1803.1>, <<"cut off">>,<<"cut off">>,[],247,false,false,0, {1408,452756,206129}, completed, {<0.1801.1>,#Ref<0.0.1.147658>}, <<"replication_ns_1@10.242.238.90">>,<0.1799.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:36.208,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1801.1>,{#Ref<0.0.1.147646>,<0.1806.1>}} [error_logger:info,2014-08-19T16:52:36.208,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1806.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166, 167,169], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159, 160,161,162,163,164,165,166,167,169]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:36.213,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.216,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.217,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3841 us [ns_server:debug,2014-08-19T16:52:36.217,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.217,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{169, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:36.218,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 168 state to replica [ns_server:info,2014-08-19T16:52:36.219,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165, 166,167,168,169] ("¨", []) 
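The trailing ("¨", []) above is presumably the (added, removed) vbucket lists formatted with ~p: a list containing only printable Latin-1 integers, such as [168], is printed by ~p as the string "¨" (and similarly 164 -> "¤", 165 -> "¥", 166 -> "¦", 167 -> "§", 169 -> "©" in the earlier messages), while the empty removal list prints as []. For example, in an Erlang shell:

1> io:format("~p ~p~n", [[168], []]).
"¨" []
ok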
[ns_server:debug,2014-08-19T16:52:36.219,ns_1@10.242.238.90:<0.1808.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166, 167,168,169], 'ns_1@10.242.238.88'}, #Ref<0.0.1.147782>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159,160,161,162,163,164, 165,166,167,168,169]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:36.220,ns_1@10.242.238.90:<0.1808.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1806.1> [ns_server:debug,2014-08-19T16:52:36.232,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161,162,163,164,165,166,167,169]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:36.232,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1810.1> [ns_server:info,2014-08-19T16:52:36.232,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:36.253,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}, {159,1}, {160,1}, {161,1}, {162,1}, {163,1}, {164,1}, {165,1}, {166,1}, {167,1}, {168,1}, {169,1}] [ns_server:info,2014-08-19T16:52:36.254,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:36.254,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. 
Silencing upstream sender [ns_server:info,2014-08-19T16:52:36.254,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:36.254,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:36.254,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:36.254,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:36.255,ns_1@10.242.238.90:<0.1811.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:36.255,ns_1@10.242.238.90:<0.1811.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:36.255,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:36.255,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:36.255,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:36.255,ns_1@10.242.238.90:<0.1806.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:36.255,ns_1@10.242.238.90:<0.1808.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1806.1> [ns_server:debug,2014-08-19T16:52:36.256,ns_1@10.242.238.90:<0.1808.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:36.256,ns_1@10.242.238.90:<0.1813.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:36.256,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1806.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1810.1>, <<"cut off">>,<<"cut off">>,[],250,false,false,0, {1408,452756,254337}, completed, {<0.1808.1>,#Ref<0.0.1.147795>}, <<"replication_ns_1@10.242.238.90">>,<0.1806.1>, {had_backfill,false,undefined,[]}, completed,false}. 
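Each PROGRESS REPORT above shows the replacement replicator being registered under 'ns_vbm_new_sup-default' as a temporary worker (never restarted by the supervisor) with a 60-second shutdown. Roughly the old-style child spec that would produce such a report; the argument plumbing is a placeholder, not the real ns_vbm_new_sup code:

-module(childspec_sketch).
-export([start_replicator/3]).

start_replicator(VBuckets, SrcNode, Args) ->
    ChildSpec = {{new_child_id, VBuckets, SrcNode},        %% child id, as in the report
                 {ebucketmigrator_srv, start_link, Args},  %% MFA
                 temporary,                                %% restart_type
                 60000,                                    %% shutdown (ms)
                 worker,
                 [ebucketmigrator_srv]},
    supervisor:start_child('ns_vbm_new_sup-default', ChildSpec).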
[ns_server:debug,2014-08-19T16:52:36.256,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1808.1>,{#Ref<0.0.1.147784>,<0.1813.1>}} [error_logger:info,2014-08-19T16:52:36.256,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1813.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166, 167,168,169], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159, 160,161,162,163,164,165,166,167,168, 169]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:36.261,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.265,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.265,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1382 us [ns_server:debug,2014-08-19T16:52:36.265,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.266,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{168, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.282,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:36.282,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1815.1> 
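Annotation: the entries above record one complete takeover-free vbucket filter change. The caller registers a replacement child under {new_child_id, VBuckets, SourceNode}, links itself to the running ebucketmigrator, asks it to change the tap filter, silences the upstream sender, confirms the downstream with an opaque message, hands the old process state to the new instance, and lets the old process exit while the supervisor reports the new child. The filter itself is printed as {VBucket, N} pairs (read here as checkpoint ids, which is an assumption), all at 1 in this log. A minimal Erlang sketch, illustrative only and not ns_server code, of how such a pair list and the per-change vbucket diff relate to the plain vbucket lists shown above:

-module(vb_filter_sketch).
-export([checkpoint_pairs/1, added_vbuckets/2]).

%% Pair every vbucket id with checkpoint 1, mirroring the
%% "Changing vbucket filter on tap stream" entries above.
checkpoint_pairs(VBuckets) ->
    [{V, 1} || V <- lists:sort(VBuckets)].

%% Diff two successive filters to see what a change added,
%% e.g. added_vbuckets([86,88,89], [86,87,88,89]) -> [87].
added_vbuckets(OldVBuckets, NewVBuckets) ->
    ordsets:subtract(ordsets:from_list(NewVBuckets),
                     ordsets:from_list(OldVBuckets)).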
[ns_server:debug,2014-08-19T16:52:36.301,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.304,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.305,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3560 us [ns_server:debug,2014-08-19T16:52:36.305,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.305,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{25, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.345,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.348,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.348,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3336 us [ns_server:debug,2014-08-19T16:52:36.349,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{24, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.349,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.393,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.396,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.397,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3218 us [ns_server:debug,2014-08-19T16:52:36.397,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.398,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{26, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, 
{uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.440,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.444,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.444,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4493 us [ns_server:debug,2014-08-19T16:52:36.446,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.445,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{27, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:36.450,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 170 state to replica [ns_server:info,2014-08-19T16:52:36.450,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146, 147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165, 166,167,168,169,170] ("ª", []) [ns_server:debug,2014-08-19T16:52:36.452,ns_1@10.242.238.90:<0.1821.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166, 167,168,169,170], 'ns_1@10.242.238.88'}, #Ref<0.0.1.148076>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129,130,131,132,133,134, 135,136,137,138,139,140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159,160,161,162,163,164, 165,166,167,168,169,170]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] 
[ns_server:debug,2014-08-19T16:52:36.453,ns_1@10.242.238.90:<0.1821.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1813.1> [ns_server:info,2014-08-19T16:52:36.453,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:36.475,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, {117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}, {159,1}, {160,1}, {161,1}, {162,1}, {163,1}, {164,1}, {165,1}, {166,1}, {167,1}, {168,1}, {169,1}, {170,1}] [ns_server:info,2014-08-19T16:52:36.476,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:36.476,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:36.476,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:36.476,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:36.476,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:36.476,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:36.476,ns_1@10.242.238.90:<0.1823.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:36.477,ns_1@10.242.238.90:<0.1823.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:36.477,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:36.477,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:36.477,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:36.477,ns_1@10.242.238.90:<0.1813.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. 
Preparing to die [ns_server:debug,2014-08-19T16:52:36.477,ns_1@10.242.238.90:<0.1821.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1813.1> [ns_server:debug,2014-08-19T16:52:36.478,ns_1@10.242.238.90:<0.1821.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:36.478,ns_1@10.242.238.90:<0.1825.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:36.478,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1813.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1815.1>, <<"cut off">>,<<"cut off">>,[],253,false,false,0, {1408,452756,476432}, completed, {<0.1821.1>,#Ref<0.0.1.148091>}, <<"replication_ns_1@10.242.238.90">>,<0.1813.1>, {had_backfill,false,undefined,[]}, completed,false}. [ns_server:debug,2014-08-19T16:52:36.478,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1821.1>,{#Ref<0.0.1.148078>,<0.1825.1>}} [error_logger:info,2014-08-19T16:52:36.478,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1825.1>}, {name, {new_child_id, [86,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166, 167,168,169,170], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109, 110,111,112,113,114,115,116,117,118,119, 120,121,122,123,124,125,126,127,128,129, 130,131,132,133,134,135,136,137,138,139, 140,141,142,143,144,145,146,147,148,149, 150,151,152,153,154,155,156,157,158,159, 160,161,162,163,164,165,166,167,168,169, 170]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:36.483,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.486,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.486,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3258 us [ns_server:debug,2014-08-19T16:52:36.487,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.487,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{170, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, 
{ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:36.489,ns_1@10.242.238.90:<0.18784.0>:ns_memcached:do_handle_call:527]Changed vbucket 87 state to replica [ns_server:info,2014-08-19T16:52:36.490,ns_1@10.242.238.90:tap_replication_manager-default<0.18775.0>:tap_replication_manager:change_vbucket_filter:200]Going to change replication from 'ns_1@10.242.238.88' to have [86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107, 108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126, 127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145, 146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164, 165,166,167,168,169,170] ("W", []) [ns_server:debug,2014-08-19T16:52:36.491,ns_1@10.242.238.90:<0.1827.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:135]Registered myself under id:{"default", {new_child_id, [86,87,88,89,90,91,92,93,94,95,96,97,98,99, 100,101,102,103,104,105,106,107,108,109,110, 111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132, 133,134,135,136,137,138,139,140,141,142,143, 144,145,146,147,148,149,150,151,152,153,154, 155,156,157,158,159,160,161,162,163,164,165, 166,167,168,169,170], 'ns_1@10.242.238.88'}, #Ref<0.0.1.148204>} Args:[{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever,#Fun}, {on_not_ready_vbuckets,#Fun}, {username,"default"}, {password,get_from_config}, {vbuckets,[86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103, 104,105,106,107,108,109,110,111,112,113,114,115,116,117,118, 119,120,121,122,123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144,145,146,147,148, 149,150,151,152,153,154,155,156,157,158,159,160,161,162,163, 164,165,166,167,168,169,170]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]] [ns_server:debug,2014-08-19T16:52:36.491,ns_1@10.242.238.90:<0.1827.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:139]Linked myself to old ebucketmigrator <0.1825.1> [ns_server:debug,2014-08-19T16:52:36.500,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169, 170]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:36.501,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1829.1> [ns_server:info,2014-08-19T16:52:36.501,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:handle_call:270]Starting new-style vbucket filter change on stream `replication_ns_1@10.242.238.90` [ns_server:info,2014-08-19T16:52:36.521,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:handle_call:297]Changing vbucket filter on tap stream `replication_ns_1@10.242.238.90`: [{86,1}, {87,1}, {88,1}, {89,1}, {90,1}, {91,1}, {92,1}, {93,1}, {94,1}, {95,1}, {96,1}, {97,1}, {98,1}, {99,1}, {100,1}, {101,1}, {102,1}, {103,1}, {104,1}, {105,1}, {106,1}, {107,1}, {108,1}, {109,1}, {110,1}, {111,1}, {112,1}, {113,1}, {114,1}, {115,1}, {116,1}, 
{117,1}, {118,1}, {119,1}, {120,1}, {121,1}, {122,1}, {123,1}, {124,1}, {125,1}, {126,1}, {127,1}, {128,1}, {129,1}, {130,1}, {131,1}, {132,1}, {133,1}, {134,1}, {135,1}, {136,1}, {137,1}, {138,1}, {139,1}, {140,1}, {141,1}, {142,1}, {143,1}, {144,1}, {145,1}, {146,1}, {147,1}, {148,1}, {149,1}, {150,1}, {151,1}, {152,1}, {153,1}, {154,1}, {155,1}, {156,1}, {157,1}, {158,1}, {159,1}, {160,1}, {161,1}, {162,1}, {163,1}, {164,1}, {165,1}, {166,1}, {167,1}, {168,1}, {169,1}, {170,1}] [ns_server:info,2014-08-19T16:52:36.522,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:handle_call:307]Successfully changed vbucket filter on tap stream `replication_ns_1@10.242.238.90`. [ns_server:info,2014-08-19T16:52:36.523,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:process_upstream:1027]Got vbucket filter change completion message. Silencing upstream sender [ns_server:info,2014-08-19T16:52:36.523,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:handle_info:366]Got reply from upstream silencing request. Completing state transition to a new ebucketmigrator. [ns_server:debug,2014-08-19T16:52:36.523,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:complete_native_vb_filter_change:170]Proceeding with reading unread binaries [ns_server:debug,2014-08-19T16:52:36.523,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:confirm_downstream:197]Going to confirm reception downstream messages [ns_server:debug,2014-08-19T16:52:36.523,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:confirm_sent_messages:805]Going to wait for reception of opaque message ack [ns_server:debug,2014-08-19T16:52:36.523,ns_1@10.242.238.90:<0.1830.1>:ebucketmigrator_srv:confirm_sent_messages:796]Sending opaque message to confirm downstream reception [ns_server:debug,2014-08-19T16:52:36.523,ns_1@10.242.238.90:<0.1830.1>:ebucketmigrator_srv:confirm_sent_messages:801]Opaque message was succesfully sent [rebalance:info,2014-08-19T16:52:36.524,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:do_confirm_sent_messages:775]Got close ack! [ns_server:debug,2014-08-19T16:52:36.524,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:confirm_downstream:201]Confirmed upstream messages are feeded to kernel [ns_server:debug,2014-08-19T16:52:36.524,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:reply_and_die:210]Passed old state to caller [ns_server:debug,2014-08-19T16:52:36.524,ns_1@10.242.238.90:<0.1825.1>:ebucketmigrator_srv:reply_and_die:213]Sent out state. Preparing to die [ns_server:debug,2014-08-19T16:52:36.524,ns_1@10.242.238.90:<0.1827.1>:ns_vbm_new_sup:do_perform_vbucket_filter_change:143]Got old state from previous ebucketmigrator: <0.1825.1> [ns_server:debug,2014-08-19T16:52:36.524,ns_1@10.242.238.90:<0.1827.1>:ns_vbm_new_sup:perform_vbucket_filter_change_loop:184]Sent old state to new instance [ns_server:info,2014-08-19T16:52:36.525,ns_1@10.242.238.90:<0.1832.1>:ns_vbm_new_sup:mk_old_state_retriever:83]Got vbucket filter change old state. Proceeding vbucket filter change operation [ns_server:debug,2014-08-19T16:52:36.525,ns_1@10.242.238.90:<0.1832.1>:ebucketmigrator_srv:init:494]Got old ebucketmigrator state from <0.1825.1>: {state,#Port<0.20537>,#Port<0.20533>,#Port<0.20538>,#Port<0.20534>,<0.1829.1>, <<"cut off">>,<<"cut off">>,[],256,false,false,0, {1408,452756,523337}, completed, {<0.1827.1>,#Ref<0.0.1.148217>}, <<"replication_ns_1@10.242.238.90">>,<0.1825.1>, {had_backfill,false,undefined,[]}, completed,false}. 
[ns_server:debug,2014-08-19T16:52:36.525,ns_1@10.242.238.90:<0.17162.0>:ns_process_registry:handle_info:98]Got exit msg: {'EXIT',<0.1827.1>,{#Ref<0.0.1.148206>,<0.1832.1>}} [error_logger:info,2014-08-19T16:52:36.525,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-default'} started: [{pid,<0.1832.1>}, {name, {new_child_id, [86,87,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166, 167,168,169,170], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{old_state_retriever, #Fun}, {on_not_ready_vbuckets, #Fun}, {username,"default"}, {password,get_from_config}, {vbuckets, [86,87,88,89,90,91,92,93,94,95,96,97,98, 99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118, 119,120,121,122,123,124,125,126,127,128, 129,130,131,132,133,134,135,136,137,138, 139,140,141,142,143,144,145,146,147,148, 149,150,151,152,153,154,155,156,157,158, 159,160,161,162,163,164,165,166,167,168, 169,170]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:52:36.530,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.533,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.534,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2954 us [ns_server:debug,2014-08-19T16:52:36.534,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.534,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{87, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.547,ns_1@10.242.238.90:<0.1832.1>:ebucketmigrator_srv:init:621]Reusing old upstream: [{vbuckets,[86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169, 170]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] [rebalance:debug,2014-08-19T16:52:36.547,ns_1@10.242.238.90:<0.1832.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1834.1> 
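Annotation: each PROGRESS REPORT above shows the replacement ebucketmigrator being registered with 'ns_vbm_new_sup-default' as a temporary worker keyed by the {new_child_id, VBuckets, SourceNode} triple, with a 60000 ms shutdown. In classic supervisor child-spec form that corresponds to a tuple like the sketch below; the wrapper function and module name are hypothetical, with field values copied from the reports:

-module(vbm_childspec_sketch).
-export([child_spec/5]).

%% Build the {Id, StartMFA, Restart, Shutdown, Type, Modules} tuple the
%% progress reports above describe: a temporary ebucketmigrator_srv worker.
child_spec(SrcHostPort, DstHostPort, Opts, VBuckets, SourceNode) ->
    {{new_child_id, VBuckets, SourceNode},
     {ebucketmigrator_srv, start_link, [SrcHostPort, DstHostPort, Opts]},
     temporary, 60000, worker, [ebucketmigrator_srv]}.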
[ns_server:debug,2014-08-19T16:52:36.565,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.568,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.569,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1830 us [ns_server:debug,2014-08-19T16:52:36.569,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.569,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{28, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.612,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.615,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.615,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3279 us [ns_server:debug,2014-08-19T16:52:36.616,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.616,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{29, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.665,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.666,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.667,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1318 us [ns_server:debug,2014-08-19T16:52:36.667,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{31, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, 
{flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.668,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.709,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.712,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.712,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2853 us [ns_server:debug,2014-08-19T16:52:36.713,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{30, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.713,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.756,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.758,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.758,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1537 us [ns_server:debug,2014-08-19T16:52:36.758,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.759,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{33, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.803,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.803,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.803,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7 us 
[ns_server:debug,2014-08-19T16:52:36.804,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.804,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{32, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.851,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.853,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.853,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1417 us [ns_server:debug,2014-08-19T16:52:36.853,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.854,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{35, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.894,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.898,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.898,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3339 us [ns_server:debug,2014-08-19T16:52:36.898,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.899,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{34, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.942,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization 
request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.945,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.945,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3293 us [ns_server:debug,2014-08-19T16:52:36.945,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.946,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{36, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.993,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:36.997,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:36.997,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 4268 us [ns_server:debug,2014-08-19T16:52:36.998,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{37, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:36.998,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.041,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.042,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.043,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1743 us [ns_server:debug,2014-08-19T16:52:37.043,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.043,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{39, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, 
{auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.090,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.091,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.091,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.091,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 202 us [ns_server:debug,2014-08-19T16:52:37.091,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{38, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.137,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.138,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.139,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1260 us [ns_server:debug,2014-08-19T16:52:37.139,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.140,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{40, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.182,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.185,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.186,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3262 us [ns_server:debug,2014-08-19T16:52:37.186,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:37.186,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{41, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.229,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.232,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.232,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2911 us [ns_server:debug,2014-08-19T16:52:37.232,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.233,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{43, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.278,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.280,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.280,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1958 us [ns_server:debug,2014-08-19T16:52:37.280,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.281,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{42, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.326,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:52:37.327,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.327,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1199 us [ns_server:debug,2014-08-19T16:52:37.328,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.328,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{44, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.372,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.373,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.374,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1537 us [ns_server:debug,2014-08-19T16:52:37.374,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.375,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{45, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.416,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.419,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.419,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3302 us [ns_server:debug,2014-08-19T16:52:37.420,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.420,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{46, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.463,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.465,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.465,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1431 us [ns_server:debug,2014-08-19T16:52:37.465,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.466,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{47, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.509,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.512,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.513,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3085 us [ns_server:debug,2014-08-19T16:52:37.513,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.513,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{48, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.557,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.559,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.560,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2225 us [ns_server:debug,2014-08-19T16:52:37.560,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:37.560,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{49, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.604,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.607,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.607,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3250 us [ns_server:debug,2014-08-19T16:52:37.607,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.607,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{51, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.655,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.657,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.657,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1482 us [ns_server:debug,2014-08-19T16:52:37.657,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.658,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{50, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.698,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:52:37.701,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.701,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3250 us [ns_server:debug,2014-08-19T16:52:37.702,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.702,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{53, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.744,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.747,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.747,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3258 us [ns_server:debug,2014-08-19T16:52:37.747,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.748,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{52, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.791,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.793,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.793,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1485 us [ns_server:debug,2014-08-19T16:52:37.793,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.794,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{54, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.838,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.841,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.842,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3435 us [ns_server:debug,2014-08-19T16:52:37.842,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.842,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{55, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.891,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.891,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.892,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 545 us [ns_server:debug,2014-08-19T16:52:37.892,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.893,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{56, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:37.931,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:37.934,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:37.934,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3151 us [ns_server:debug,2014-08-19T16:52:37.934,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:37.935,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{57, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.007,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.008,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.008,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{58, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.044,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.044,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 14 us [ns_server:debug,2014-08-19T16:52:38.081,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.083,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.083,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1256 us [ns_server:debug,2014-08-19T16:52:38.084,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{59, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.084,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.128,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:52:38.128,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.128,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 316 us [ns_server:debug,2014-08-19T16:52:38.129,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.129,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{60, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.181,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.184,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.184,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3122 us [ns_server:debug,2014-08-19T16:52:38.184,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.185,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{61, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.225,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.228,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.228,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3089 us [ns_server:debug,2014-08-19T16:52:38.229,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.229,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{62, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.270,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.273,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.273,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3111 us [ns_server:debug,2014-08-19T16:52:38.273,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.274,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{63, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.315,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.318,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.318,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3152 us [ns_server:debug,2014-08-19T16:52:38.318,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.319,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{65, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.363,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.364,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.364,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1177 us [ns_server:debug,2014-08-19T16:52:38.365,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:38.365,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{64, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.408,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.408,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.408,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8 us [ns_server:debug,2014-08-19T16:52:38.409,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.409,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{67, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.453,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.456,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.456,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2384 us [ns_server:debug,2014-08-19T16:52:38.457,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{66, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.457,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.496,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:52:38.499,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.500,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3301 us [ns_server:debug,2014-08-19T16:52:38.500,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.501,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{69, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.548,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.548,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.548,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 8 us [ns_server:debug,2014-08-19T16:52:38.549,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.549,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{68, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.593,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.594,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.594,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1104 us [ns_server:debug,2014-08-19T16:52:38.594,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.594,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{70, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.636,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.640,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.640,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3290 us [ns_server:debug,2014-08-19T16:52:38.640,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.641,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{71, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.684,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.685,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.685,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1501 us [ns_server:debug,2014-08-19T16:52:38.686,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.686,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{73, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.731,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.734,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2707 us [ns_server:debug,2014-08-19T16:52:38.734,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.735,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:38.735,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{72, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.778,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.779,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.780,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1719 us [ns_server:debug,2014-08-19T16:52:38.780,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.780,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{74, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.824,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.826,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.827,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2908 us [ns_server:debug,2014-08-19T16:52:38.827,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.828,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{75, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.873,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:52:38.874,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.874,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1349 us [ns_server:debug,2014-08-19T16:52:38.874,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.875,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{77, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.919,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.920,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.920,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1077 us [ns_server:debug,2014-08-19T16:52:38.921,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.921,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{76, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:38.968,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.968,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:38.968,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 20 us [ns_server:debug,2014-08-19T16:52:38.969,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:38.969,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{78, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:39.013,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:39.018,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.019,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 5978 us [ns_server:debug,2014-08-19T16:52:39.019,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.019,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{79, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:39.056,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:39.059,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.059,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3262 us [ns_server:debug,2014-08-19T16:52:39.060,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.060,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{81, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:39.137,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:39.140,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.140,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3039 us [ns_server:debug,2014-08-19T16:52:39.140,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:52:39.141,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{80, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:39.173,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:39.176,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.176,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 3143 us [ns_server:debug,2014-08-19T16:52:39.177,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.177,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{82, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:39.221,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:39.223,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.223,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1292 us [ns_server:debug,2014-08-19T16:52:39.223,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.224,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{83, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:39.267,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' 
[ns_server:debug,2014-08-19T16:52:39.268,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.269,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1568 us [ns_server:debug,2014-08-19T16:52:39.269,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.270,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{84, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:39.316,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:39.318,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.318,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2233 us [ns_server:debug,2014-08-19T16:52:39.319,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{85, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:39.319,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.364,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:39.366,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.366,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 1811 us [ns_server:debug,2014-08-19T16:52:39.366,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.367,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{0, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:52:39.373,ns_1@10.242.238.90:janitor_agent-default<0.18778.0>:janitor_agent:handle_info:848]Undoing temporary vbucket states caused by rebalance [ns_server:debug,2014-08-19T16:52:39.376,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.376,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.380,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[{0, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {1, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {2, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {3, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {4, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {5, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {6, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {7, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {8, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {9, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {10, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {11, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {12, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {13, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {14, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {15, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {16, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {17, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {18, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {19, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {20, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {21, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {22, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {23, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {24, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {25, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {26, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {27, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {28, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {29, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {30, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {31, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {32, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {33, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {34, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {35, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {36, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {37, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {38, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {39, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {40, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {41, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {42, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {43, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {44, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {45, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {46, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {47, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {48, 
['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {49, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {50, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {51, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {52, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {53, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {54, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {55, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {56, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {57, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {58, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {59, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {60, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {61, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {62, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {63, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {64, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {65, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {66, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {67, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {68, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {69, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {70, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {71, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {72, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {73, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {74, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {75, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {76, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {77, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {78, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {79, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {80, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {81, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {82, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {83, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {84, ['ns_1@10.242.238.88','ns_1@10.242.238.89'], []}, {85,['ns_1@10.242.238.88'|...],[]}, {86,[...],...}, {87,...}, {...}|...]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:52:39.380,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:52:39.383,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:handle_call:119]Fully synchronized config in 2225 us [ns_server:info,2014-08-19T16:52:39.388,ns_1@10.242.238.90:<0.1917.1>:diag_handler:log_all_tap_and_checkpoint_stats:128]logging tap & checkpoint stats [ns_server:debug,2014-08-19T16:52:39.392,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.392,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: counters -> [{rebalance_success,1},{rebalance_start,1}] [ns_server:debug,2014-08-19T16:52:39.392,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: rebalance_status -> none 
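Each of the repeated "config change: buckets" deltas above records one vbucket's chain being extended during the rebalance: a delta of the form {map,[{N, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}]} says vbucket N's chain (active node first, then replicas) went from a lone copy on .88 to an active on .88 plus a replica on .89, and the 16:52:39.392 entries (counters -> [{rebalance_success,1},{rebalance_start,1}], rebalance_status -> none) mark the rebalance finishing. A minimal sketch for turning those deltas into a readable per-vbucket listing follows; it is an assumed, illustrative helper rather than anything shipped with Couchbase (the regexes, the chain() helper and the command-line interface are inventions for this example), and it only understands the single-tuple delta layout seen in this capture.

#!/usr/bin/env python3
"""Illustrative sketch only, not a Couchbase tool: pull the per-vbucket chain
updates out of ns_config_log 'config change: buckets' entries like the ones
captured above and print them as
    <timestamp>  vb <id>: [old chain] -> [new chain]
Assumes the single-tuple map delta layout seen in this capture."""
import re
import sys

# Start of one ns_server debug entry, capturing its timestamp
# (e.g. 2014-08-19T16:52:37.463).
ENTRY_RE = re.compile(r"\[ns_server:debug,([0-9T:.\-]+),")

# {map,[{N, [old chain], [new chain]}]} -- the one-element delta form above.
MAP_RE = re.compile(r"\{map,\[\{(\d+),\s*\[([^\]]*)\],\s*\[([^\]]*)\]\}\]\}")


def chain(raw):
    """Render "'ns_1@10.242.238.88',undefined" as a readable node list."""
    return ", ".join(p.strip().strip("'") for p in raw.split(",") if p.strip())


def main(path):
    with open(path, encoding="utf-8", errors="replace") as fh:
        text = fh.read()
    # Treat each "[ns_server:debug,<ts>," marker as the start of an entry so
    # the timestamp can be reported next to every map delta found in it.
    starts = list(ENTRY_RE.finditer(text))
    for i, start in enumerate(starts):
        end = starts[i + 1].start() if i + 1 < len(starts) else len(text)
        entry = text[start.start():end]
        if "config change: buckets" not in entry:
            continue
        for vb, old, new in MAP_RE.findall(entry):
            print(f"{start.group(1)}  vb {vb}: [{chain(old)}] -> [{chain(new)}]")


if __name__ == "__main__":
    main(sys.argv[1])

Run against a capture saved to a file (e.g. "python3 map_deltas.py ns_server.debug.log", file name assumed), it would list the chain change for vbuckets 47 through 85 and then vbucket 0 in the order they appear above.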
[ns_server:debug,2014-08-19T16:52:39.392,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.392,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: rebalancer_pid -> undefined [ns_server:debug,2014-08-19T16:52:39.392,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:52:39.392,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:52:39.418,ns_1@10.242.238.90:<0.1917.1>:diag_handler:log_all_tap_and_checkpoint_stats:130]tap:default: [{<<"ep_tap_throttle_queue_cap">>,<<"1000000">>}, {<<"ep_tap_throttle_threshold">>,<<"90">>}, {<<"ep_tap_backoff_period">>,<<"5">>}, {<<"ep_tap_ack_grace_period">>,<<"300">>}, {<<"ep_tap_ack_interval">>,<<"1000">>}, {<<"ep_tap_ack_window_size">>,<<"10">>}, {<<"ep_tap_total_backlog_size">>,<<"0">>}, {<<"ep_tap_queue_itemondisk">>,<<"0">>}, {<<"ep_tap_queue_backfillremaining">>,<<"0">>}, {<<"ep_tap_queue_backoff">>,<<"0">>}, {<<"ep_tap_queue_drain">>,<<"0">>}, {<<"ep_tap_queue_fill">>,<<"0">>}, {<<"ep_tap_total_queue">>,<<"0">>}, {<<"ep_tap_count">>,<<"6">>}, {<<"ep_tap_noop_interval">>,<<"20">>}, {<<"ep_tap_throttled">>,<<"0">>}, {<<"ep_tap_deletes">>,<<"0">>}, {<<"ep_tap_fg_fetched">>,<<"0">>}, {<<"ep_tap_bg_fetch_requeued">>,<<"0">>}, {<<"ep_tap_bg_fetched">>,<<"0">>}, {<<"ep_tap_bg_max_pending">>,<<"500">>}, {<<"ep_tap_total_fetched">>,<<"515">>}, {<<"eq_tapq:anon_771:num_unknown">>,<<"0">>}, {<<"eq_tapq:anon_771:num_checkpoint_end_failed">>,<<"0">>}, {<<"eq_tapq:anon_771:num_checkpoint_end">>,<<"0">>}, {<<"eq_tapq:anon_771:num_checkpoint_start_failed">>,<<"0">>}, {<<"eq_tapq:anon_771:num_checkpoint_start">>,<<"85">>}, {<<"eq_tapq:anon_771:num_vbucket_set_failed">>,<<"0">>}, {<<"eq_tapq:anon_771:num_vbucket_set">>,<<"0">>}, {<<"eq_tapq:anon_771:num_opaque_failed">>,<<"0">>}, {<<"eq_tapq:anon_771:num_opaque">>,<<"170">>}, {<<"eq_tapq:anon_771:num_mutation_failed">>,<<"0">>}, {<<"eq_tapq:anon_771:num_mutation">>,<<"0">>}, {<<"eq_tapq:anon_771:num_flush_failed">>,<<"0">>}, {<<"eq_tapq:anon_771:num_flush">>,<<"0">>}, {<<"eq_tapq:anon_771:num_delete_failed">>,<<"0">>}, {<<"eq_tapq:anon_771:num_delete">>,<<"0">>}, {<<"eq_tapq:anon_771:reserved">>,<<"0">>}, {<<"eq_tapq:anon_771:supports_ack">>,<<"true">>}, {<<"eq_tapq:anon_771:pending_disconnect">>,<<"false">>}, {<<"eq_tapq:anon_771:connected">>,<<"true">>}, {<<"eq_tapq:anon_771:created">>,<<"3948">>}, {<<"eq_tapq:anon_771:type">>,<<"consumer">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_596">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_595">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_594">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_593">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_592">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_591">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_590">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_589">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_588">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_587">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_586">>,<<"0">>}, 
{<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_585">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_584">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_583">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_582">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_581">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_580">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_579">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_578">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_577">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_576">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_575">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_574">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_573">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_572">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_571">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_570">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_569">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_568">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_567">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_566">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_565">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_564">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_563">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_562">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_561">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_560">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_559">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_558">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_557">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_556">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_555">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_554">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_553">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_552">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_551">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_550">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_549">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_548">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_547">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_546">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_545">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_544">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_543">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_542">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_541">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_540">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_539">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_538">>,<<"0">>}, 
{<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_537">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_536">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_535">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_534">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_533">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_532">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_531">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_530">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_529">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_528">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_527">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_526">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_525">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_524">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_523">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_522">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_521">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_520">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_519">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_518">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_517">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_516">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_515">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_514">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_513">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:sent_from_vb_512">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:ack_window_full">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:ack_log_size">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:seqno_ack_requested">>,<<"257">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:recv_ack_seqno">>,<<"257">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:ack_seqno">>,<<"258">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:total_noops">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:total_backlog_size">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:queue_itemondisk">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:queue_backfillremaining">>, <<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:queue_backoff">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:queue_drain">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:queue_fill">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:queue_memory">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:backfill_start_timestamp">>, <<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:backfill_completed">>,<<"true">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:pending_disk_backfill">>, <<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:pending_backfill">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:paused">>,<<"1">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:suspended">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:flags">>, <<"85 (ack,backfill,vblist,checkpoints)">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:bg_jobs_completed">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:bg_jobs_issued">>,<<"0">>}, 
{<<"eq_tapq:replication_ns_1@10.242.238.88:bg_result_size">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:has_queued_item">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:idle">>,<<"true">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:rec_fetched">>,<<"171">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:vb_filter">>,<<"{ [512,596] }">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:vb_filters">>,<<"85">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:qlen_low_pri">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:qlen_high_pri">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:qlen">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:reserved">>,<<"1">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:supports_ack">>,<<"true">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:pending_disconnect">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:connected">>,<<"true">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:created">>,<<"3918">>}, {<<"eq_tapq:replication_ns_1@10.242.238.88:type">>,<<"producer">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_681">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_680">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_679">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_678">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_677">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_676">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_675">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_674">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_673">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_672">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_671">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_670">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_669">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_668">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_667">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_666">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_665">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_664">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_663">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_662">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_661">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_660">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_659">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_658">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_657">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_656">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_655">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_654">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_653">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_652">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_651">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_650">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_649">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_648">>,<<"0">>}, 
{<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_647">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_646">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_645">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_644">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_643">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_642">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_641">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_640">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_639">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_638">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_637">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_636">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_635">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_634">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_633">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_632">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_631">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_630">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_629">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_628">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_627">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_626">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_625">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_624">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_623">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_622">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_621">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_620">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_619">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_618">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_617">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_616">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_615">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_614">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_613">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_612">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_611">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_610">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_609">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_608">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_607">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_606">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_605">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_604">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_603">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_602">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_601">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_600">>,<<"0">>}, 
{<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_599">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_598">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:sent_from_vb_597">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:ack_window_full">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:ack_log_size">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:seqno_ack_requested">>,<<"257">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:recv_ack_seqno">>,<<"257">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:ack_seqno">>,<<"258">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:total_noops">>,<<"1">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:total_backlog_size">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:queue_itemondisk">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:queue_backfillremaining">>, <<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:queue_backoff">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:queue_drain">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:queue_fill">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:queue_memory">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:backfill_start_timestamp">>, <<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:backfill_completed">>,<<"true">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:pending_disk_backfill">>, <<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:pending_backfill">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:paused">>,<<"1">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:suspended">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:flags">>, <<"85 (ack,backfill,vblist,checkpoints)">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:bg_jobs_completed">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:bg_jobs_issued">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:bg_result_size">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:has_queued_item">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:idle">>,<<"true">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:rec_fetched">>,<<"171">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:vb_filter">>,<<"{ [597,681] }">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:vb_filters">>,<<"85">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:qlen_low_pri">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:qlen_high_pri">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:qlen">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:reserved">>,<<"1">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:supports_ack">>,<<"true">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:pending_disconnect">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:connected">>,<<"true">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:created">>,<<"3860">>}, {<<"eq_tapq:replication_ns_1@10.242.238.89:type">>,<<"producer">>}, {<<"eq_tapq:anon_301:num_unknown">>,<<"0">>}, {<<"eq_tapq:anon_301:num_checkpoint_end_failed">>,<<"0">>}, {<<"eq_tapq:anon_301:num_checkpoint_end">>,<<"0">>}, {<<"eq_tapq:anon_301:num_checkpoint_start_failed">>,<<"0">>}, {<<"eq_tapq:anon_301:num_checkpoint_start">>,<<"85">>}, {<<"eq_tapq:anon_301:num_vbucket_set_failed">>,<<"0">>}, {<<"eq_tapq:anon_301:num_vbucket_set">>,<<"0">>}, {<<"eq_tapq:anon_301:num_opaque_failed">>,<<"0">>}, {<<"eq_tapq:anon_301:num_opaque">>,<<"170">>}, {<<"eq_tapq:anon_301:num_mutation_failed">>,<<"0">>}, {<<"eq_tapq:anon_301:num_mutation">>,<<"0">>}, {<<"eq_tapq:anon_301:num_flush_failed">>,<<"0">>}, 
{<<"eq_tapq:anon_301:num_flush">>,<<"0">>}, {<<"eq_tapq:anon_301:num_delete_failed">>,<<"0">>}, {<<"eq_tapq:anon_301:num_delete">>,<<"0">>}, {<<"eq_tapq:anon_301:reserved">>,<<"0">>}, {<<"eq_tapq:anon_301:supports_ack">>,<<"true">>}, {<<"eq_tapq:anon_301:pending_disconnect">>,<<"false">>}, {<<"eq_tapq:anon_301:connected">>,<<"true">>}, {<<"eq_tapq:anon_301:created">>,<<"3857">>}, {<<"eq_tapq:anon_301:type">>,<<"consumer">>}, {<<"eq_tapq:anon_45:num_unknown">>,<<"0">>}, {<<"eq_tapq:anon_45:num_checkpoint_end_failed">>,<<"0">>}, {<<"eq_tapq:anon_45:num_checkpoint_end">>,<<"0">>}, {<<"eq_tapq:anon_45:num_checkpoint_start_failed">>,<<"0">>}, {<<"eq_tapq:anon_45:num_checkpoint_start">>,<<"86">>}, {<<"eq_tapq:anon_45:num_vbucket_set_failed">>,<<"0">>}, {<<"eq_tapq:anon_45:num_vbucket_set">>,<<"0">>}, {<<"eq_tapq:anon_45:num_opaque_failed">>,<<"0">>}, {<<"eq_tapq:anon_45:num_opaque">>,<<"172">>}, {<<"eq_tapq:anon_45:num_mutation_failed">>,<<"0">>}, {<<"eq_tapq:anon_45:num_mutation">>,<<"0">>}, {<<"eq_tapq:anon_45:num_flush_failed">>,<<"0">>}, {<<"eq_tapq:anon_45:num_flush">>,<<"0">>}, {<<"eq_tapq:anon_45:num_delete_failed">>,<<"0">>}, {<<"eq_tapq:anon_45:num_delete">>,<<"0">>}, {<<"eq_tapq:anon_45:reserved">>,<<"0">>}, {<<"eq_tapq:anon_45:supports_ack">>,<<"true">>}, {<<"eq_tapq:anon_45:pending_disconnect">>,<<"false">>}, {<<"eq_tapq:anon_45:connected">>,<<"true">>}, {<<"eq_tapq:anon_45:created">>,<<"3794">>}, {<<"eq_tapq:anon_45:type">>,<<"consumer">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_767">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_766">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_765">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_764">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_763">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_762">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_761">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_760">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_759">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_758">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_757">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_756">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_755">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_754">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_753">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_752">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_751">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_750">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_749">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_748">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_747">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_746">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_745">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_744">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_743">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_742">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_741">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_740">>,<<"0">>}, 
{<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_739">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_738">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_737">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_736">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_735">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_734">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_733">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_732">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_731">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_730">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_729">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_728">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_727">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_726">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_725">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_724">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_723">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_722">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_721">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_720">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_719">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_718">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_717">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_716">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_715">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_714">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_713">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_712">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_711">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_710">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_709">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_708">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_707">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_706">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_705">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_704">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_703">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_702">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_701">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_700">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_699">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_698">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_697">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_696">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_695">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_694">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_693">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_692">>,<<"0">>}, 
{<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_691">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_690">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_689">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_688">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_687">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_686">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_685">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_684">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_683">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:sent_from_vb_682">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:ack_window_full">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:ack_log_size">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:seqno_ack_requested">>,<<"260">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:recv_ack_seqno">>,<<"260">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:ack_seqno">>,<<"261">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:total_noops">>,<<"4">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:total_backlog_size">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:queue_itemondisk">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:queue_backfillremaining">>, <<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:queue_backoff">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:queue_drain">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:queue_fill">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:queue_memory">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:backfill_start_timestamp">>, <<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:backfill_completed">>,<<"true">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:pending_disk_backfill">>, <<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:pending_backfill">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:paused">>,<<"1">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:suspended">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:flags">>, <<"85 (ack,backfill,vblist,checkpoints)">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:bg_jobs_completed">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:bg_jobs_issued">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:bg_result_size">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:has_queued_item">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:idle">>,<<"true">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:rec_fetched">>,<<"173">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:vb_filter">>,<<"{ [682,767] }">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:vb_filters">>,<<"86">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:qlen_low_pri">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:qlen_high_pri">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:qlen">>,<<"0">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:reserved">>,<<"1">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:supports_ack">>,<<"true">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:pending_disconnect">>,<<"false">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:connected">>,<<"true">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:created">>,<<"3794">>}, {<<"eq_tapq:replication_ns_1@10.242.238.91:type">>,<<"producer">>}] [ns_server:info,2014-08-19T16:52:39.425,ns_1@10.242.238.90:<0.1917.1>:diag_handler:log_all_tap_and_checkpoint_stats:130]checkpoint:default: 
[{<<"vb_1023:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1023:checkpoint_extension">>,<<"false">>}, {<<"vb_1023:num_items_for_persistence">>,<<"0">>}, {<<"vb_1023:num_checkpoints">>,<<"1">>}, {<<"vb_1023:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1023:num_checkpoint_items">>,<<"1">>}, {<<"vb_1023:num_tap_cursors">>,<<"0">>}, {<<"vb_1023:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1023:open_checkpoint_id">>,<<"2">>}, {<<"vb_1023:state">>,<<"replica">>}, {<<"vb_1022:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1022:checkpoint_extension">>,<<"false">>}, {<<"vb_1022:num_items_for_persistence">>,<<"0">>}, {<<"vb_1022:num_checkpoints">>,<<"1">>}, {<<"vb_1022:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1022:num_checkpoint_items">>,<<"1">>}, {<<"vb_1022:num_tap_cursors">>,<<"0">>}, {<<"vb_1022:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1022:open_checkpoint_id">>,<<"2">>}, {<<"vb_1022:state">>,<<"replica">>}, {<<"vb_1021:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1021:checkpoint_extension">>,<<"false">>}, {<<"vb_1021:num_items_for_persistence">>,<<"0">>}, {<<"vb_1021:num_checkpoints">>,<<"1">>}, {<<"vb_1021:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1021:num_checkpoint_items">>,<<"1">>}, {<<"vb_1021:num_tap_cursors">>,<<"0">>}, {<<"vb_1021:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1021:open_checkpoint_id">>,<<"2">>}, {<<"vb_1021:state">>,<<"replica">>}, {<<"vb_1020:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1020:checkpoint_extension">>,<<"false">>}, {<<"vb_1020:num_items_for_persistence">>,<<"0">>}, {<<"vb_1020:num_checkpoints">>,<<"1">>}, {<<"vb_1020:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1020:num_checkpoint_items">>,<<"1">>}, {<<"vb_1020:num_tap_cursors">>,<<"0">>}, {<<"vb_1020:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1020:open_checkpoint_id">>,<<"2">>}, {<<"vb_1020:state">>,<<"replica">>}, {<<"vb_1019:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1019:checkpoint_extension">>,<<"false">>}, {<<"vb_1019:num_items_for_persistence">>,<<"0">>}, {<<"vb_1019:num_checkpoints">>,<<"1">>}, {<<"vb_1019:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1019:num_checkpoint_items">>,<<"1">>}, {<<"vb_1019:num_tap_cursors">>,<<"0">>}, {<<"vb_1019:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1019:open_checkpoint_id">>,<<"2">>}, {<<"vb_1019:state">>,<<"replica">>}, {<<"vb_1018:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1018:checkpoint_extension">>,<<"false">>}, {<<"vb_1018:num_items_for_persistence">>,<<"0">>}, {<<"vb_1018:num_checkpoints">>,<<"1">>}, {<<"vb_1018:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1018:num_checkpoint_items">>,<<"1">>}, {<<"vb_1018:num_tap_cursors">>,<<"0">>}, {<<"vb_1018:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1018:open_checkpoint_id">>,<<"2">>}, {<<"vb_1018:state">>,<<"replica">>}, {<<"vb_1017:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1017:checkpoint_extension">>,<<"false">>}, {<<"vb_1017:num_items_for_persistence">>,<<"0">>}, {<<"vb_1017:num_checkpoints">>,<<"1">>}, {<<"vb_1017:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1017:num_checkpoint_items">>,<<"1">>}, {<<"vb_1017:num_tap_cursors">>,<<"0">>}, {<<"vb_1017:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1017:open_checkpoint_id">>,<<"2">>}, {<<"vb_1017:state">>,<<"replica">>}, {<<"vb_1016:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1016:checkpoint_extension">>,<<"false">>}, {<<"vb_1016:num_items_for_persistence">>,<<"0">>}, {<<"vb_1016:num_checkpoints">>,<<"1">>}, {<<"vb_1016:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1016:num_checkpoint_items">>,<<"1">>}, 
{<<"vb_1016:num_tap_cursors">>,<<"0">>}, {<<"vb_1016:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1016:open_checkpoint_id">>,<<"2">>}, {<<"vb_1016:state">>,<<"replica">>}, {<<"vb_1015:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1015:checkpoint_extension">>,<<"false">>}, {<<"vb_1015:num_items_for_persistence">>,<<"0">>}, {<<"vb_1015:num_checkpoints">>,<<"1">>}, {<<"vb_1015:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1015:num_checkpoint_items">>,<<"1">>}, {<<"vb_1015:num_tap_cursors">>,<<"0">>}, {<<"vb_1015:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1015:open_checkpoint_id">>,<<"2">>}, {<<"vb_1015:state">>,<<"replica">>}, {<<"vb_1014:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1014:checkpoint_extension">>,<<"false">>}, {<<"vb_1014:num_items_for_persistence">>,<<"0">>}, {<<"vb_1014:num_checkpoints">>,<<"1">>}, {<<"vb_1014:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1014:num_checkpoint_items">>,<<"1">>}, {<<"vb_1014:num_tap_cursors">>,<<"0">>}, {<<"vb_1014:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1014:open_checkpoint_id">>,<<"2">>}, {<<"vb_1014:state">>,<<"replica">>}, {<<"vb_1013:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1013:checkpoint_extension">>,<<"false">>}, {<<"vb_1013:num_items_for_persistence">>,<<"0">>}, {<<"vb_1013:num_checkpoints">>,<<"1">>}, {<<"vb_1013:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1013:num_checkpoint_items">>,<<"1">>}, {<<"vb_1013:num_tap_cursors">>,<<"0">>}, {<<"vb_1013:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1013:open_checkpoint_id">>,<<"2">>}, {<<"vb_1013:state">>,<<"replica">>}, {<<"vb_1012:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1012:checkpoint_extension">>,<<"false">>}, {<<"vb_1012:num_items_for_persistence">>,<<"0">>}, {<<"vb_1012:num_checkpoints">>,<<"1">>}, {<<"vb_1012:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1012:num_checkpoint_items">>,<<"1">>}, {<<"vb_1012:num_tap_cursors">>,<<"0">>}, {<<"vb_1012:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1012:open_checkpoint_id">>,<<"2">>}, {<<"vb_1012:state">>,<<"replica">>}, {<<"vb_1011:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1011:checkpoint_extension">>,<<"false">>}, {<<"vb_1011:num_items_for_persistence">>,<<"0">>}, {<<"vb_1011:num_checkpoints">>,<<"1">>}, {<<"vb_1011:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1011:num_checkpoint_items">>,<<"1">>}, {<<"vb_1011:num_tap_cursors">>,<<"0">>}, {<<"vb_1011:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1011:open_checkpoint_id">>,<<"2">>}, {<<"vb_1011:state">>,<<"replica">>}, {<<"vb_1010:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1010:checkpoint_extension">>,<<"false">>}, {<<"vb_1010:num_items_for_persistence">>,<<"0">>}, {<<"vb_1010:num_checkpoints">>,<<"1">>}, {<<"vb_1010:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1010:num_checkpoint_items">>,<<"1">>}, {<<"vb_1010:num_tap_cursors">>,<<"0">>}, {<<"vb_1010:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1010:open_checkpoint_id">>,<<"2">>}, {<<"vb_1010:state">>,<<"replica">>}, {<<"vb_1009:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1009:checkpoint_extension">>,<<"false">>}, {<<"vb_1009:num_items_for_persistence">>,<<"0">>}, {<<"vb_1009:num_checkpoints">>,<<"1">>}, {<<"vb_1009:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1009:num_checkpoint_items">>,<<"1">>}, {<<"vb_1009:num_tap_cursors">>,<<"0">>}, {<<"vb_1009:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1009:open_checkpoint_id">>,<<"2">>}, {<<"vb_1009:state">>,<<"replica">>}, {<<"vb_1008:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1008:checkpoint_extension">>,<<"false">>}, {<<"vb_1008:num_items_for_persistence">>,<<"0">>}, 
{<<"vb_1008:num_checkpoints">>,<<"1">>}, {<<"vb_1008:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1008:num_checkpoint_items">>,<<"1">>}, {<<"vb_1008:num_tap_cursors">>,<<"0">>}, {<<"vb_1008:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1008:open_checkpoint_id">>,<<"2">>}, {<<"vb_1008:state">>,<<"replica">>}, {<<"vb_1007:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1007:checkpoint_extension">>,<<"false">>}, {<<"vb_1007:num_items_for_persistence">>,<<"0">>}, {<<"vb_1007:num_checkpoints">>,<<"1">>}, {<<"vb_1007:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1007:num_checkpoint_items">>,<<"1">>}, {<<"vb_1007:num_tap_cursors">>,<<"0">>}, {<<"vb_1007:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1007:open_checkpoint_id">>,<<"2">>}, {<<"vb_1007:state">>,<<"replica">>}, {<<"vb_1006:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1006:checkpoint_extension">>,<<"false">>}, {<<"vb_1006:num_items_for_persistence">>,<<"0">>}, {<<"vb_1006:num_checkpoints">>,<<"1">>}, {<<"vb_1006:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1006:num_checkpoint_items">>,<<"1">>}, {<<"vb_1006:num_tap_cursors">>,<<"0">>}, {<<"vb_1006:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1006:open_checkpoint_id">>,<<"2">>}, {<<"vb_1006:state">>,<<"replica">>}, {<<"vb_1005:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1005:checkpoint_extension">>,<<"false">>}, {<<"vb_1005:num_items_for_persistence">>,<<"0">>}, {<<"vb_1005:num_checkpoints">>,<<"1">>}, {<<"vb_1005:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1005:num_checkpoint_items">>,<<"1">>}, {<<"vb_1005:num_tap_cursors">>,<<"0">>}, {<<"vb_1005:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1005:open_checkpoint_id">>,<<"2">>}, {<<"vb_1005:state">>,<<"replica">>}, {<<"vb_1004:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1004:checkpoint_extension">>,<<"false">>}, {<<"vb_1004:num_items_for_persistence">>,<<"0">>}, {<<"vb_1004:num_checkpoints">>,<<"1">>}, {<<"vb_1004:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1004:num_checkpoint_items">>,<<"1">>}, {<<"vb_1004:num_tap_cursors">>,<<"0">>}, {<<"vb_1004:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1004:open_checkpoint_id">>,<<"2">>}, {<<"vb_1004:state">>,<<"replica">>}, {<<"vb_1003:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1003:checkpoint_extension">>,<<"false">>}, {<<"vb_1003:num_items_for_persistence">>,<<"0">>}, {<<"vb_1003:num_checkpoints">>,<<"1">>}, {<<"vb_1003:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1003:num_checkpoint_items">>,<<"1">>}, {<<"vb_1003:num_tap_cursors">>,<<"0">>}, {<<"vb_1003:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1003:open_checkpoint_id">>,<<"2">>}, {<<"vb_1003:state">>,<<"replica">>}, {<<"vb_1002:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1002:checkpoint_extension">>,<<"false">>}, {<<"vb_1002:num_items_for_persistence">>,<<"0">>}, {<<"vb_1002:num_checkpoints">>,<<"1">>}, {<<"vb_1002:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1002:num_checkpoint_items">>,<<"1">>}, {<<"vb_1002:num_tap_cursors">>,<<"0">>}, {<<"vb_1002:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1002:open_checkpoint_id">>,<<"2">>}, {<<"vb_1002:state">>,<<"replica">>}, {<<"vb_1001:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1001:checkpoint_extension">>,<<"false">>}, {<<"vb_1001:num_items_for_persistence">>,<<"0">>}, {<<"vb_1001:num_checkpoints">>,<<"1">>}, {<<"vb_1001:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1001:num_checkpoint_items">>,<<"1">>}, {<<"vb_1001:num_tap_cursors">>,<<"0">>}, {<<"vb_1001:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1001:open_checkpoint_id">>,<<"2">>}, {<<"vb_1001:state">>,<<"replica">>}, 
{<<"vb_1000:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_1000:checkpoint_extension">>,<<"false">>}, {<<"vb_1000:num_items_for_persistence">>,<<"0">>}, {<<"vb_1000:num_checkpoints">>,<<"1">>}, {<<"vb_1000:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_1000:num_checkpoint_items">>,<<"1">>}, {<<"vb_1000:num_tap_cursors">>,<<"0">>}, {<<"vb_1000:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_1000:open_checkpoint_id">>,<<"2">>}, {<<"vb_1000:state">>,<<"replica">>}, {<<"vb_999:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_999:checkpoint_extension">>,<<"false">>}, {<<"vb_999:num_items_for_persistence">>,<<"0">>}, {<<"vb_999:num_checkpoints">>,<<"1">>}, {<<"vb_999:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_999:num_checkpoint_items">>,<<"1">>}, {<<"vb_999:num_tap_cursors">>,<<"0">>}, {<<"vb_999:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_999:open_checkpoint_id">>,<<"2">>}, {<<"vb_999:state">>,<<"replica">>}, {<<"vb_998:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_998:checkpoint_extension">>,<<"false">>}, {<<"vb_998:num_items_for_persistence">>,<<"0">>}, {<<"vb_998:num_checkpoints">>,<<"1">>}, {<<"vb_998:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_998:num_checkpoint_items">>,<<"1">>}, {<<"vb_998:num_tap_cursors">>,<<"0">>}, {<<"vb_998:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_998:open_checkpoint_id">>,<<"2">>}, {<<"vb_998:state">>,<<"replica">>}, {<<"vb_997:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_997:checkpoint_extension">>,<<"false">>}, {<<"vb_997:num_items_for_persistence">>,<<"0">>}, {<<"vb_997:num_checkpoints">>,<<"1">>}, {<<"vb_997:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_997:num_checkpoint_items">>,<<"1">>}, {<<"vb_997:num_tap_cursors">>,<<"0">>}, {<<"vb_997:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_997:open_checkpoint_id">>,<<"2">>}, {<<"vb_997:state">>,<<"replica">>}, {<<"vb_996:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_996:checkpoint_extension">>,<<"false">>}, {<<"vb_996:num_items_for_persistence">>,<<"0">>}, {<<"vb_996:num_checkpoints">>,<<"1">>}, {<<"vb_996:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_996:num_checkpoint_items">>,<<"1">>}, {<<"vb_996:num_tap_cursors">>,<<"0">>}, {<<"vb_996:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_996:open_checkpoint_id">>,<<"2">>}, {<<"vb_996:state">>,<<"replica">>}, {<<"vb_995:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_995:checkpoint_extension">>,<<"false">>}, {<<"vb_995:num_items_for_persistence">>,<<"0">>}, {<<"vb_995:num_checkpoints">>,<<"1">>}, {<<"vb_995:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_995:num_checkpoint_items">>,<<"1">>}, {<<"vb_995:num_tap_cursors">>,<<"0">>}, {<<"vb_995:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_995:open_checkpoint_id">>,<<"2">>}, {<<"vb_995:state">>,<<"replica">>}, {<<"vb_994:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_994:checkpoint_extension">>,<<"false">>}, {<<"vb_994:num_items_for_persistence">>,<<"0">>}, {<<"vb_994:num_checkpoints">>,<<"1">>}, {<<"vb_994:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_994:num_checkpoint_items">>,<<"1">>}, {<<"vb_994:num_tap_cursors">>,<<"0">>}, {<<"vb_994:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_994:open_checkpoint_id">>,<<"2">>}, {<<"vb_994:state">>,<<"replica">>}, {<<"vb_993:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_993:checkpoint_extension">>,<<"false">>}, {<<"vb_993:num_items_for_persistence">>,<<"0">>}, {<<"vb_993:num_checkpoints">>,<<"1">>}, {<<"vb_993:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_993:num_checkpoint_items">>,<<"1">>}, {<<"vb_993:num_tap_cursors">>,<<"0">>}, {<<"vb_993:last_closed_checkpoint_id">>,<<"1">>}, 
{<<"vb_993:open_checkpoint_id">>,<<"2">>}, {<<"vb_993:state">>,<<"replica">>}, {<<"vb_992:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_992:checkpoint_extension">>,<<"false">>}, {<<"vb_992:num_items_for_persistence">>,<<"0">>}, {<<"vb_992:num_checkpoints">>,<<"1">>}, {<<"vb_992:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_992:num_checkpoint_items">>,<<"1">>}, {<<"vb_992:num_tap_cursors">>,<<"0">>}, {<<"vb_992:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_992:open_checkpoint_id">>,<<"2">>}, {<<"vb_992:state">>,<<"replica">>}, {<<"vb_991:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_991:checkpoint_extension">>,<<"false">>}, {<<"vb_991:num_items_for_persistence">>,<<"0">>}, {<<"vb_991:num_checkpoints">>,<<"1">>}, {<<"vb_991:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_991:num_checkpoint_items">>,<<"1">>}, {<<"vb_991:num_tap_cursors">>,<<"0">>}, {<<"vb_991:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_991:open_checkpoint_id">>,<<"2">>}, {<<"vb_991:state">>,<<"replica">>}, {<<"vb_990:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_990:checkpoint_extension">>,<<"false">>}, {<<"vb_990:num_items_for_persistence">>,<<"0">>}, {<<"vb_990:num_checkpoints">>,<<"1">>}, {<<"vb_990:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_990:num_checkpoint_items">>,<<"1">>}, {<<"vb_990:num_tap_cursors">>,<<"0">>}, {<<"vb_990:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_990:open_checkpoint_id">>,<<"2">>}, {<<"vb_990:state">>,<<"replica">>}, {<<"vb_989:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_989:checkpoint_extension">>,<<"false">>}, {<<"vb_989:num_items_for_persistence">>,<<"0">>}, {<<"vb_989:num_checkpoints">>,<<"1">>}, {<<"vb_989:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_989:num_checkpoint_items">>,<<"1">>}, {<<"vb_989:num_tap_cursors">>,<<"0">>}, {<<"vb_989:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_989:open_checkpoint_id">>,<<"2">>}, {<<"vb_989:state">>,<<"replica">>}, {<<"vb_988:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_988:checkpoint_extension">>,<<"false">>}, {<<"vb_988:num_items_for_persistence">>,<<"0">>}, {<<"vb_988:num_checkpoints">>,<<"1">>}, {<<"vb_988:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_988:num_checkpoint_items">>,<<"1">>}, {<<"vb_988:num_tap_cursors">>,<<"0">>}, {<<"vb_988:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_988:open_checkpoint_id">>,<<"2">>}, {<<"vb_988:state">>,<<"replica">>}, {<<"vb_987:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_987:checkpoint_extension">>,<<"false">>}, {<<"vb_987:num_items_for_persistence">>,<<"0">>}, {<<"vb_987:num_checkpoints">>,<<"1">>}, {<<"vb_987:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_987:num_checkpoint_items">>,<<"1">>}, {<<"vb_987:num_tap_cursors">>,<<"0">>}, {<<"vb_987:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_987:open_checkpoint_id">>,<<"2">>}, {<<"vb_987:state">>,<<"replica">>}, {<<"vb_986:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_986:checkpoint_extension">>,<<"false">>}, {<<"vb_986:num_items_for_persistence">>,<<"0">>}, {<<"vb_986:num_checkpoints">>,<<"1">>}, {<<"vb_986:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_986:num_checkpoint_items">>,<<"1">>}, {<<"vb_986:num_tap_cursors">>,<<"0">>}, {<<"vb_986:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_986:open_checkpoint_id">>,<<"2">>}, {<<"vb_986:state">>,<<"replica">>}, {<<"vb_985:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_985:checkpoint_extension">>,<<"false">>}, {<<"vb_985:num_items_for_persistence">>,<<"0">>}, {<<"vb_985:num_checkpoints">>,<<"1">>}, {<<"vb_985:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_985:num_checkpoint_items">>,<<"1">>}, 
{<<"vb_985:num_tap_cursors">>,<<"0">>}, {<<"vb_985:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_985:open_checkpoint_id">>,<<"2">>}, {<<"vb_985:state">>,<<"replica">>}, {<<"vb_984:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_984:checkpoint_extension">>,<<"false">>}, {<<"vb_984:num_items_for_persistence">>,<<"0">>}, {<<"vb_984:num_checkpoints">>,<<"1">>}, {<<"vb_984:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_984:num_checkpoint_items">>,<<"1">>}, {<<"vb_984:num_tap_cursors">>,<<"0">>}, {<<"vb_984:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_984:open_checkpoint_id">>,<<"2">>}, {<<"vb_984:state">>,<<"replica">>}, {<<"vb_983:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_983:checkpoint_extension">>,<<"false">>}, {<<"vb_983:num_items_for_persistence">>,<<"0">>}, {<<"vb_983:num_checkpoints">>,<<"1">>}, {<<"vb_983:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_983:num_checkpoint_items">>,<<"1">>}, {<<"vb_983:num_tap_cursors">>,<<"0">>}, {<<"vb_983:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_983:open_checkpoint_id">>,<<"2">>}, {<<"vb_983:state">>,<<"replica">>}, {<<"vb_982:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_982:checkpoint_extension">>,<<"false">>}, {<<"vb_982:num_items_for_persistence">>,<<"0">>}, {<<"vb_982:num_checkpoints">>,<<"1">>}, {<<"vb_982:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_982:num_checkpoint_items">>,<<"1">>}, {<<"vb_982:num_tap_cursors">>,<<"0">>}, {<<"vb_982:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_982:open_checkpoint_id">>,<<"2">>}, {<<"vb_982:state">>,<<"replica">>}, {<<"vb_981:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_981:checkpoint_extension">>,<<"false">>}, {<<"vb_981:num_items_for_persistence">>,<<"0">>}, {<<"vb_981:num_checkpoints">>,<<"1">>}, {<<"vb_981:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_981:num_checkpoint_items">>,<<"1">>}, {<<"vb_981:num_tap_cursors">>,<<"0">>}, {<<"vb_981:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_981:open_checkpoint_id">>,<<"2">>}, {<<"vb_981:state">>,<<"replica">>}, {<<"vb_980:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_980:checkpoint_extension">>,<<"false">>}, {<<"vb_980:num_items_for_persistence">>,<<"0">>}, {<<"vb_980:num_checkpoints">>,<<"1">>}, {<<"vb_980:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_980:num_checkpoint_items">>,<<"1">>}, {<<"vb_980:num_tap_cursors">>,<<"0">>}, {<<"vb_980:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_980:open_checkpoint_id">>,<<"2">>}, {<<"vb_980:state">>,<<"replica">>}, {<<"vb_979:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_979:checkpoint_extension">>,<<"false">>}, {<<"vb_979:num_items_for_persistence">>,<<"0">>}, {<<"vb_979:num_checkpoints">>,<<"1">>}, {<<"vb_979:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_979:num_checkpoint_items">>,<<"1">>}, {<<"vb_979:num_tap_cursors">>,<<"0">>}, {<<"vb_979:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_979:open_checkpoint_id">>,<<"2">>}, {<<"vb_979:state">>,<<"replica">>}, {<<"vb_978:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_978:checkpoint_extension">>,<<"false">>}, {<<"vb_978:num_items_for_persistence">>,<<"0">>}, {<<"vb_978:num_checkpoints">>,<<"1">>}, {<<"vb_978:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_978:num_checkpoint_items">>,<<"1">>}, {<<"vb_978:num_tap_cursors">>,<<"0">>}, {<<"vb_978:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_978:open_checkpoint_id">>,<<"2">>}, {<<"vb_978:state">>,<<"replica">>}, {<<"vb_977:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_977:checkpoint_extension">>,<<"false">>}, {<<"vb_977:num_items_for_persistence">>,<<"0">>}, {<<"vb_977:num_checkpoints">>,<<"1">>}, 
{<<"vb_977:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_977:num_checkpoint_items">>,<<"1">>}, {<<"vb_977:num_tap_cursors">>,<<"0">>}, {<<"vb_977:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_977:open_checkpoint_id">>,<<"2">>}, {<<"vb_977:state">>,<<"replica">>}, {<<"vb_976:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_976:checkpoint_extension">>,<<"false">>}, {<<"vb_976:num_items_for_persistence">>,<<"0">>}, {<<"vb_976:num_checkpoints">>,<<"1">>}, {<<"vb_976:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_976:num_checkpoint_items">>,<<"1">>}, {<<"vb_976:num_tap_cursors">>,<<"0">>}, {<<"vb_976:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_976:open_checkpoint_id">>,<<"2">>}, {<<"vb_976:state">>,<<"replica">>}, {<<"vb_975:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_975:checkpoint_extension">>,<<"false">>}, {<<"vb_975:num_items_for_persistence">>,<<"0">>}, {<<"vb_975:num_checkpoints">>,<<"1">>}, {<<"vb_975:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_975:num_checkpoint_items">>,<<"1">>}, {<<"vb_975:num_tap_cursors">>,<<"0">>}, {<<"vb_975:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_975:open_checkpoint_id">>,<<"2">>}, {<<"vb_975:state">>,<<"replica">>}, {<<"vb_974:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_974:checkpoint_extension">>,<<"false">>}, {<<"vb_974:num_items_for_persistence">>,<<"0">>}, {<<"vb_974:num_checkpoints">>,<<"1">>}, {<<"vb_974:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_974:num_checkpoint_items">>,<<"1">>}, {<<"vb_974:num_tap_cursors">>,<<"0">>}, {<<"vb_974:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_974:open_checkpoint_id">>,<<"2">>}, {<<"vb_974:state">>,<<"replica">>}, {<<"vb_973:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_973:checkpoint_extension">>,<<"false">>}, {<<"vb_973:num_items_for_persistence">>,<<"0">>}, {<<"vb_973:num_checkpoints">>,<<"1">>}, {<<"vb_973:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_973:num_checkpoint_items">>,<<"1">>}, {<<"vb_973:num_tap_cursors">>,<<"0">>}, {<<"vb_973:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_973:open_checkpoint_id">>,<<"2">>}, {<<"vb_973:state">>,<<"replica">>}, {<<"vb_972:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_972:checkpoint_extension">>,<<"false">>}, {<<"vb_972:num_items_for_persistence">>,<<"0">>}, {<<"vb_972:num_checkpoints">>,<<"1">>}, {<<"vb_972:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_972:num_checkpoint_items">>,<<"1">>}, {<<"vb_972:num_tap_cursors">>,<<"0">>}, {<<"vb_972:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_972:open_checkpoint_id">>,<<"2">>}, {<<"vb_972:state">>,<<"replica">>}, {<<"vb_971:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_971:checkpoint_extension">>,<<"false">>}, {<<"vb_971:num_items_for_persistence">>,<<"0">>}, {<<"vb_971:num_checkpoints">>,<<"1">>}, {<<"vb_971:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_971:num_checkpoint_items">>,<<"1">>}, {<<"vb_971:num_tap_cursors">>,<<"0">>}, {<<"vb_971:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_971:open_checkpoint_id">>,<<"2">>}, {<<"vb_971:state">>,<<"replica">>}, {<<"vb_970:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_970:checkpoint_extension">>,<<"false">>}, {<<"vb_970:num_items_for_persistence">>,<<"0">>}, {<<"vb_970:num_checkpoints">>,<<"1">>}, {<<"vb_970:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_970:num_checkpoint_items">>,<<"1">>}, {<<"vb_970:num_tap_cursors">>,<<"0">>}, {<<"vb_970:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_970:open_checkpoint_id">>,<<"2">>}, {<<"vb_970:state">>,<<"replica">>}, {<<"vb_969:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_969:checkpoint_extension">>,<<"false">>}, 
{<<"vb_969:num_items_for_persistence">>,<<"0">>}, {<<"vb_969:num_checkpoints">>,<<"1">>}, {<<"vb_969:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_969:num_checkpoint_items">>,<<"1">>}, {<<"vb_969:num_tap_cursors">>,<<"0">>}, {<<"vb_969:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_969:open_checkpoint_id">>,<<"2">>}, {<<"vb_969:state">>,<<"replica">>}, {<<"vb_968:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_968:checkpoint_extension">>,<<"false">>}, {<<"vb_968:num_items_for_persistence">>,<<"0">>}, {<<"vb_968:num_checkpoints">>,<<"1">>}, {<<"vb_968:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_968:num_checkpoint_items">>,<<"1">>}, {<<"vb_968:num_tap_cursors">>,<<"0">>}, {<<"vb_968:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_968:open_checkpoint_id">>,<<"2">>}, {<<"vb_968:state">>,<<"replica">>}, {<<"vb_967:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_967:checkpoint_extension">>,<<"false">>}, {<<"vb_967:num_items_for_persistence">>,<<"0">>}, {<<"vb_967:num_checkpoints">>,<<"1">>}, {<<"vb_967:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_967:num_checkpoint_items">>,<<"1">>}, {<<"vb_967:num_tap_cursors">>,<<"0">>}, {<<"vb_967:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_967:open_checkpoint_id">>,<<"2">>}, {<<"vb_967:state">>,<<"replica">>}, {<<"vb_966:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_966:checkpoint_extension">>,<<"false">>}, {<<"vb_966:num_items_for_persistence">>,<<"0">>}, {<<"vb_966:num_checkpoints">>,<<"1">>}, {<<"vb_966:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_966:num_checkpoint_items">>,<<"1">>}, {<<"vb_966:num_tap_cursors">>,<<"0">>}, {<<"vb_966:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_966:open_checkpoint_id">>,<<"2">>}, {<<"vb_966:state">>,<<"replica">>}, {<<"vb_965:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_965:checkpoint_extension">>,<<"false">>}, {<<"vb_965:num_items_for_persistence">>,<<"0">>}, {<<"vb_965:num_checkpoints">>,<<"1">>}, {<<"vb_965:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_965:num_checkpoint_items">>,<<"1">>}, {<<"vb_965:num_tap_cursors">>,<<"0">>}, {<<"vb_965:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_965:open_checkpoint_id">>,<<"2">>}, {<<"vb_965:state">>,<<"replica">>}, {<<"vb_964:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_964:checkpoint_extension">>,<<"false">>}, {<<"vb_964:num_items_for_persistence">>,<<"0">>}, {<<"vb_964:num_checkpoints">>,<<"1">>}, {<<"vb_964:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_964:num_checkpoint_items">>,<<"1">>}, {<<"vb_964:num_tap_cursors">>,<<"0">>}, {<<"vb_964:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_964:open_checkpoint_id">>,<<"2">>}, {<<"vb_964:state">>,<<"replica">>}, {<<"vb_963:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_963:checkpoint_extension">>,<<"false">>}, {<<"vb_963:num_items_for_persistence">>,<<"0">>}, {<<"vb_963:num_checkpoints">>,<<"1">>}, {<<"vb_963:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_963:num_checkpoint_items">>,<<"1">>}, {<<"vb_963:num_tap_cursors">>,<<"0">>}, {<<"vb_963:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_963:open_checkpoint_id">>,<<"2">>}, {<<"vb_963:state">>,<<"replica">>}, {<<"vb_962:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_962:checkpoint_extension">>,<<"false">>}, {<<"vb_962:num_items_for_persistence">>,<<"0">>}, {<<"vb_962:num_checkpoints">>,<<"1">>}, {<<"vb_962:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_962:num_checkpoint_items">>,<<"1">>}, {<<"vb_962:num_tap_cursors">>,<<"0">>}, {<<"vb_962:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_962:open_checkpoint_id">>,<<"2">>}, {<<"vb_962:state">>,<<"replica">>}, 
{<<"vb_961:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_961:checkpoint_extension">>,<<"false">>}, {<<"vb_961:num_items_for_persistence">>,<<"0">>}, {<<"vb_961:num_checkpoints">>,<<"1">>}, {<<"vb_961:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_961:num_checkpoint_items">>,<<"1">>}, {<<"vb_961:num_tap_cursors">>,<<"0">>}, {<<"vb_961:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_961:open_checkpoint_id">>,<<"2">>}, {<<"vb_961:state">>,<<"replica">>}, {<<"vb_960:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_960:checkpoint_extension">>,<<"false">>}, {<<"vb_960:num_items_for_persistence">>,<<"0">>}, {<<"vb_960:num_checkpoints">>,<<"1">>}, {<<"vb_960:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_960:num_checkpoint_items">>,<<"1">>}, {<<"vb_960:num_tap_cursors">>,<<"0">>}, {<<"vb_960:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_960:open_checkpoint_id">>,<<"2">>}, {<<"vb_960:state">>,<<"replica">>}, {<<"vb_959:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_959:checkpoint_extension">>,<<"false">>}, {<<"vb_959:num_items_for_persistence">>,<<"0">>}, {<<"vb_959:num_checkpoints">>,<<"1">>}, {<<"vb_959:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_959:num_checkpoint_items">>,<<"1">>}, {<<"vb_959:num_tap_cursors">>,<<"0">>}, {<<"vb_959:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_959:open_checkpoint_id">>,<<"2">>}, {<<"vb_959:state">>,<<"replica">>}, {<<"vb_958:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_958:checkpoint_extension">>,<<"false">>}, {<<"vb_958:num_items_for_persistence">>,<<"0">>}, {<<"vb_958:num_checkpoints">>,<<"1">>}, {<<"vb_958:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_958:num_checkpoint_items">>,<<"1">>}, {<<"vb_958:num_tap_cursors">>,<<"0">>}, {<<"vb_958:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_958:open_checkpoint_id">>,<<"2">>}, {<<"vb_958:state">>,<<"replica">>}, {<<"vb_957:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_957:checkpoint_extension">>,<<"false">>}, {<<"vb_957:num_items_for_persistence">>,<<"0">>}, {<<"vb_957:num_checkpoints">>,<<"1">>}, {<<"vb_957:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_957:num_checkpoint_items">>,<<"1">>}, {<<"vb_957:num_tap_cursors">>,<<"0">>}, {<<"vb_957:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_957:open_checkpoint_id">>,<<"2">>}, {<<"vb_957:state">>,<<"replica">>}, {<<"vb_956:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_956:checkpoint_extension">>,<<"false">>}, {<<"vb_956:num_items_for_persistence">>,<<"0">>}, {<<"vb_956:num_checkpoints">>,<<"1">>}, {<<"vb_956:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_956:num_checkpoint_items">>,<<"1">>}, {<<"vb_956:num_tap_cursors">>,<<"0">>}, {<<"vb_956:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_956:open_checkpoint_id">>,<<"2">>}, {<<"vb_956:state">>,<<"replica">>}, {<<"vb_955:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_955:checkpoint_extension">>,<<"false">>}, {<<"vb_955:num_items_for_persistence">>,<<"0">>}, {<<"vb_955:num_checkpoints">>,<<"1">>}, {<<"vb_955:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_955:num_checkpoint_items">>,<<"1">>}, {<<"vb_955:num_tap_cursors">>,<<"0">>}, {<<"vb_955:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_955:open_checkpoint_id">>,<<"2">>}, {<<"vb_955:state">>,<<"replica">>}, {<<"vb_954:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_954:checkpoint_extension">>,<<"false">>}, {<<"vb_954:num_items_for_persistence">>,<<"0">>}, {<<"vb_954:num_checkpoints">>,<<"1">>}, {<<"vb_954:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_954:num_checkpoint_items">>,<<"1">>}, {<<"vb_954:num_tap_cursors">>,<<"0">>}, {<<"vb_954:last_closed_checkpoint_id">>,<<"1">>}, 
{<<"vb_954:open_checkpoint_id">>,<<"2">>}, {<<"vb_954:state">>,<<"replica">>}, {<<"vb_953:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_953:checkpoint_extension">>,<<"false">>}, {<<"vb_953:num_items_for_persistence">>,<<"0">>}, {<<"vb_953:num_checkpoints">>,<<"1">>}, {<<"vb_953:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_953:num_checkpoint_items">>,<<"1">>}, {<<"vb_953:num_tap_cursors">>,<<"0">>}, {<<"vb_953:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_953:open_checkpoint_id">>,<<"2">>}, {<<"vb_953:state">>,<<"replica">>}, {<<"vb_952:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_952:checkpoint_extension">>,<<"false">>}, {<<"vb_952:num_items_for_persistence">>,<<"0">>}, {<<"vb_952:num_checkpoints">>,<<"1">>}, {<<"vb_952:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_952:num_checkpoint_items">>,<<"1">>}, {<<"vb_952:num_tap_cursors">>,<<"0">>}, {<<"vb_952:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_952:open_checkpoint_id">>,<<"2">>}, {<<"vb_952:state">>,<<"replica">>}, {<<"vb_951:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_951:checkpoint_extension">>,<<"false">>}, {<<"vb_951:num_items_for_persistence">>,<<"0">>}, {<<"vb_951:num_checkpoints">>,<<"1">>}, {<<"vb_951:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_951:num_checkpoint_items">>,<<"1">>}, {<<"vb_951:num_tap_cursors">>,<<"0">>}, {<<"vb_951:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_951:open_checkpoint_id">>,<<"2">>}, {<<"vb_951:state">>,<<"replica">>}, {<<"vb_950:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_950:checkpoint_extension">>,<<"false">>}, {<<"vb_950:num_items_for_persistence">>,<<"0">>}, {<<"vb_950:num_checkpoints">>,<<"1">>}, {<<"vb_950:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_950:num_checkpoint_items">>,<<"1">>}, {<<"vb_950:num_tap_cursors">>,<<"0">>}, {<<"vb_950:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_950:open_checkpoint_id">>,<<"2">>}, {<<"vb_950:state">>,<<"replica">>}, {<<"vb_949:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_949:checkpoint_extension">>,<<"false">>}, {<<"vb_949:num_items_for_persistence">>,<<"0">>}, {<<"vb_949:num_checkpoints">>,<<"1">>}, {<<"vb_949:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_949:num_checkpoint_items">>,<<"1">>}, {<<"vb_949:num_tap_cursors">>,<<"0">>}, {<<"vb_949:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_949:open_checkpoint_id">>,<<"2">>}, {<<"vb_949:state">>,<<"replica">>}, {<<"vb_948:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_948:checkpoint_extension">>,<<"false">>}, {<<"vb_948:num_items_for_persistence">>,<<"0">>}, {<<"vb_948:num_checkpoints">>,<<"1">>}, {<<"vb_948:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_948:num_checkpoint_items">>,<<"1">>}, {<<"vb_948:num_tap_cursors">>,<<"0">>}, {<<"vb_948:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_948:open_checkpoint_id">>,<<"2">>}, {<<"vb_948:state">>,<<"replica">>}, {<<"vb_947:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_947:checkpoint_extension">>,<<"false">>}, {<<"vb_947:num_items_for_persistence">>,<<"0">>}, {<<"vb_947:num_checkpoints">>,<<"1">>}, {<<"vb_947:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_947:num_checkpoint_items">>,<<"1">>}, {<<"vb_947:num_tap_cursors">>,<<"0">>}, {<<"vb_947:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_947:open_checkpoint_id">>,<<"2">>}, {<<"vb_947:state">>,<<"replica">>}, {<<"vb_946:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_946:checkpoint_extension">>,<<"false">>}, {<<"vb_946:num_items_for_persistence">>,<<"0">>}, {<<"vb_946:num_checkpoints">>,<<"1">>}, {<<"vb_946:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_946:num_checkpoint_items">>,<<"1">>}, 
{<<"vb_946:num_tap_cursors">>,<<"0">>}, {<<"vb_946:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_946:open_checkpoint_id">>,<<"2">>}, {<<"vb_946:state">>,<<"replica">>}, {<<"vb_945:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_945:checkpoint_extension">>,<<"false">>}, {<<"vb_945:num_items_for_persistence">>,<<"0">>}, {<<"vb_945:num_checkpoints">>,<<"1">>}, {<<"vb_945:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_945:num_checkpoint_items">>,<<"1">>}, {<<"vb_945:num_tap_cursors">>,<<"0">>}, {<<"vb_945:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_945:open_checkpoint_id">>,<<"2">>}, {<<"vb_945:state">>,<<"replica">>}, {<<"vb_944:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_944:checkpoint_extension">>,<<"false">>}, {<<"vb_944:num_items_for_persistence">>,<<"0">>}, {<<"vb_944:num_checkpoints">>,<<"1">>}, {<<"vb_944:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_944:num_checkpoint_items">>,<<"1">>}, {<<"vb_944:num_tap_cursors">>,<<"0">>}, {<<"vb_944:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_944:open_checkpoint_id">>,<<"2">>}, {<<"vb_944:state">>,<<"replica">>}, {<<"vb_943:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_943:checkpoint_extension">>,<<"false">>}, {<<"vb_943:num_items_for_persistence">>,<<"0">>}, {<<"vb_943:num_checkpoints">>,<<"1">>}, {<<"vb_943:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_943:num_checkpoint_items">>,<<"1">>}, {<<"vb_943:num_tap_cursors">>,<<"0">>}, {<<"vb_943:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_943:open_checkpoint_id">>,<<"2">>}, {<<"vb_943:state">>,<<"replica">>}, {<<"vb_942:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_942:checkpoint_extension">>,<<"false">>}, {<<"vb_942:num_items_for_persistence">>,<<"0">>}, {<<"vb_942:num_checkpoints">>,<<"1">>}, {<<"vb_942:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_942:num_checkpoint_items">>,<<"1">>}, {<<"vb_942:num_tap_cursors">>,<<"0">>}, {<<"vb_942:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_942:open_checkpoint_id">>,<<"2">>}, {<<"vb_942:state">>,<<"replica">>}, {<<"vb_941:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_941:checkpoint_extension">>,<<"false">>}, {<<"vb_941:num_items_for_persistence">>,<<"0">>}, {<<"vb_941:num_checkpoints">>,<<"1">>}, {<<"vb_941:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_941:num_checkpoint_items">>,<<"1">>}, {<<"vb_941:num_tap_cursors">>,<<"0">>}, {<<"vb_941:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_941:open_checkpoint_id">>,<<"2">>}, {<<"vb_941:state">>,<<"replica">>}, {<<"vb_940:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_940:checkpoint_extension">>,<<"false">>}, {<<"vb_940:num_items_for_persistence">>,<<"0">>}, {<<"vb_940:num_checkpoints">>,<<"1">>}, {<<"vb_940:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_940:num_checkpoint_items">>,<<"1">>}, {<<"vb_940:num_tap_cursors">>,<<"0">>}, {<<"vb_940:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_940:open_checkpoint_id">>,<<"2">>}, {<<"vb_940:state">>,<<"replica">>}, {<<"vb_939:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_939:checkpoint_extension">>,<<"false">>}, {<<"vb_939:num_items_for_persistence">>,<<"0">>}, {<<"vb_939:num_checkpoints">>,<<"1">>}, {<<"vb_939:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_939:num_checkpoint_items">>,<<"1">>}, {<<"vb_939:num_tap_cursors">>,<<"0">>}, {<<"vb_939:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_939:open_checkpoint_id">>,<<"2">>}, {<<"vb_939:state">>,<<"replica">>}, {<<"vb_938:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_938:checkpoint_extension">>,<<"false">>}, {<<"vb_938:num_items_for_persistence">>,<<"0">>}, {<<"vb_938:num_checkpoints">>,<<"1">>}, 
{<<"vb_938:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_938:num_checkpoint_items">>,<<"1">>}, {<<"vb_938:num_tap_cursors">>,<<"0">>}, {<<"vb_938:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_938:open_checkpoint_id">>,<<"2">>}, {<<"vb_938:state">>,<<"replica">>}, {<<"vb_767:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_767:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_767:checkpoint_extension">>,<<"false">>}, {<<"vb_767:num_items_for_persistence">>,<<"0">>}, {<<"vb_767:num_checkpoints">>,<<"1">>}, {<<"vb_767:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_767:num_checkpoint_items">>,<<"1">>}, {<<"vb_767:num_tap_cursors">>,<<"1">>}, {<<"vb_767:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_767:open_checkpoint_id">>,<<"2">>}, {<<"vb_767:state">>,<<"active">>}, {<<"vb_766:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_766:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_766:checkpoint_extension">>,<<"false">>}, {<<"vb_766:num_items_for_persistence">>,<<"0">>}, {<<"vb_766:num_checkpoints">>,<<"1">>}, {<<"vb_766:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_766:num_checkpoint_items">>,<<"1">>}, {<<"vb_766:num_tap_cursors">>,<<"1">>}, {<<"vb_766:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_766:open_checkpoint_id">>,<<"2">>}, {<<"vb_766:state">>,<<"active">>}, {<<"vb_765:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_765:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_765:checkpoint_extension">>,<<"false">>}, {<<"vb_765:num_items_for_persistence">>,<<"0">>}, {<<"vb_765:num_checkpoints">>,<<"1">>}, {<<"vb_765:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_765:num_checkpoint_items">>,<<"1">>}, {<<"vb_765:num_tap_cursors">>,<<"1">>}, {<<"vb_765:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_765:open_checkpoint_id">>,<<"2">>}, {<<"vb_765:state">>,<<"active">>}, {<<"vb_764:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_764:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_764:checkpoint_extension">>,<<"false">>}, {<<"vb_764:num_items_for_persistence">>,<<"0">>}, {<<"vb_764:num_checkpoints">>,<<"1">>}, {<<"vb_764:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_764:num_checkpoint_items">>,<<"1">>}, {<<"vb_764:num_tap_cursors">>,<<"1">>}, {<<"vb_764:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_764:open_checkpoint_id">>,<<"2">>}, {<<"vb_764:state">>,<<"active">>}, {<<"vb_763:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_763:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_763:checkpoint_extension">>,<<"false">>}, {<<"vb_763:num_items_for_persistence">>,<<"0">>}, {<<"vb_763:num_checkpoints">>,<<"1">>}, {<<"vb_763:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_763:num_checkpoint_items">>,<<"1">>}, {<<"vb_763:num_tap_cursors">>,<<"1">>}, {<<"vb_763:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_763:open_checkpoint_id">>,<<"2">>}, {<<"vb_763:state">>,<<"active">>}, {<<"vb_762:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_762:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_762:checkpoint_extension">>,<<"false">>}, {<<"vb_762:num_items_for_persistence">>,<<"0">>}, {<<"vb_762:num_checkpoints">>,<<"1">>}, {<<"vb_762:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_762:num_checkpoint_items">>,<<"1">>}, {<<"vb_762:num_tap_cursors">>,<<"1">>}, {<<"vb_762:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_762:open_checkpoint_id">>,<<"2">>}, {<<"vb_762:state">>,<<"active">>}, {<<"vb_761:persisted_checkpoint_id">>,<<"1">>}, 
{<<"vb_761:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_761:checkpoint_extension">>,<<"false">>}, {<<"vb_761:num_items_for_persistence">>,<<"0">>}, {<<"vb_761:num_checkpoints">>,<<"1">>}, {<<"vb_761:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_761:num_checkpoint_items">>,<<"1">>}, {<<"vb_761:num_tap_cursors">>,<<"1">>}, {<<"vb_761:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_761:open_checkpoint_id">>,<<"2">>}, {<<"vb_761:state">>,<<"active">>}, {<<"vb_760:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_760:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_760:checkpoint_extension">>,<<"false">>}, {<<"vb_760:num_items_for_persistence">>,<<"0">>}, {<<"vb_760:num_checkpoints">>,<<"1">>}, {<<"vb_760:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_760:num_checkpoint_items">>,<<"1">>}, {<<"vb_760:num_tap_cursors">>,<<"1">>}, {<<"vb_760:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_760:open_checkpoint_id">>,<<"2">>}, {<<"vb_760:state">>,<<"active">>}, {<<"vb_759:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_759:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_759:checkpoint_extension">>,<<"false">>}, {<<"vb_759:num_items_for_persistence">>,<<"0">>}, {<<"vb_759:num_checkpoints">>,<<"1">>}, {<<"vb_759:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_759:num_checkpoint_items">>,<<"1">>}, {<<"vb_759:num_tap_cursors">>,<<"1">>}, {<<"vb_759:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_759:open_checkpoint_id">>,<<"2">>}, {<<"vb_759:state">>,<<"active">>}, {<<"vb_758:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_758:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_758:checkpoint_extension">>,<<"false">>}, {<<"vb_758:num_items_for_persistence">>,<<"0">>}, {<<"vb_758:num_checkpoints">>,<<"1">>}, {<<"vb_758:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_758:num_checkpoint_items">>,<<"1">>}, {<<"vb_758:num_tap_cursors">>,<<"1">>}, {<<"vb_758:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_758:open_checkpoint_id">>,<<"2">>}, {<<"vb_758:state">>,<<"active">>}, {<<"vb_757:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_757:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_757:checkpoint_extension">>,<<"false">>}, {<<"vb_757:num_items_for_persistence">>,<<"0">>}, {<<"vb_757:num_checkpoints">>,<<"1">>}, {<<"vb_757:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_757:num_checkpoint_items">>,<<"1">>}, {<<"vb_757:num_tap_cursors">>,<<"1">>}, {<<"vb_757:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_757:open_checkpoint_id">>,<<"2">>}, {<<"vb_757:state">>,<<"active">>}, {<<"vb_756:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_756:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_756:checkpoint_extension">>,<<"false">>}, {<<"vb_756:num_items_for_persistence">>,<<"0">>}, {<<"vb_756:num_checkpoints">>,<<"1">>}, {<<"vb_756:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_756:num_checkpoint_items">>,<<"1">>}, {<<"vb_756:num_tap_cursors">>,<<"1">>}, {<<"vb_756:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_756:open_checkpoint_id">>,<<"2">>}, {<<"vb_756:state">>,<<"active">>}, {<<"vb_755:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_755:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_755:checkpoint_extension">>,<<"false">>}, {<<"vb_755:num_items_for_persistence">>,<<"0">>}, {<<"vb_755:num_checkpoints">>,<<"1">>}, {<<"vb_755:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_755:num_checkpoint_items">>,<<"1">>}, 
{<<"vb_755:num_tap_cursors">>,<<"1">>}, {<<"vb_755:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_755:open_checkpoint_id">>,<<"2">>}, {<<"vb_755:state">>,<<"active">>}, {<<"vb_754:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_754:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_754:checkpoint_extension">>,<<"false">>}, {<<"vb_754:num_items_for_persistence">>,<<"0">>}, {<<"vb_754:num_checkpoints">>,<<"1">>}, {<<"vb_754:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_754:num_checkpoint_items">>,<<"1">>}, {<<"vb_754:num_tap_cursors">>,<<"1">>}, {<<"vb_754:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_754:open_checkpoint_id">>,<<"2">>}, {<<"vb_754:state">>,<<"active">>}, {<<"vb_753:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_753:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_753:checkpoint_extension">>,<<"false">>}, {<<"vb_753:num_items_for_persistence">>,<<"0">>}, {<<"vb_753:num_checkpoints">>,<<"1">>}, {<<"vb_753:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_753:num_checkpoint_items">>,<<"1">>}, {<<"vb_753:num_tap_cursors">>,<<"1">>}, {<<"vb_753:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_753:open_checkpoint_id">>,<<"2">>}, {<<"vb_753:state">>,<<"active">>}, {<<"vb_752:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_752:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_752:checkpoint_extension">>,<<"false">>}, {<<"vb_752:num_items_for_persistence">>,<<"0">>}, {<<"vb_752:num_checkpoints">>,<<"1">>}, {<<"vb_752:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_752:num_checkpoint_items">>,<<"1">>}, {<<"vb_752:num_tap_cursors">>,<<"1">>}, {<<"vb_752:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_752:open_checkpoint_id">>,<<"2">>}, {<<"vb_752:state">>,<<"active">>}, {<<"vb_751:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_751:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_751:checkpoint_extension">>,<<"false">>}, {<<"vb_751:num_items_for_persistence">>,<<"0">>}, {<<"vb_751:num_checkpoints">>,<<"1">>}, {<<"vb_751:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_751:num_checkpoint_items">>,<<"1">>}, {<<"vb_751:num_tap_cursors">>,<<"1">>}, {<<"vb_751:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_751:open_checkpoint_id">>,<<"2">>}, {<<"vb_751:state">>,<<"active">>}, {<<"vb_750:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_750:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_750:checkpoint_extension">>,<<"false">>}, {<<"vb_750:num_items_for_persistence">>,<<"0">>}, {<<"vb_750:num_checkpoints">>,<<"1">>}, {<<"vb_750:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_750:num_checkpoint_items">>,<<"1">>}, {<<"vb_750:num_tap_cursors">>,<<"1">>}, {<<"vb_750:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_750:open_checkpoint_id">>,<<"2">>}, {<<"vb_750:state">>,<<"active">>}, {<<"vb_749:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_749:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_749:checkpoint_extension">>,<<"false">>}, {<<"vb_749:num_items_for_persistence">>,<<"0">>}, {<<"vb_749:num_checkpoints">>,<<"1">>}, {<<"vb_749:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_749:num_checkpoint_items">>,<<"1">>}, {<<"vb_749:num_tap_cursors">>,<<"1">>}, {<<"vb_749:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_749:open_checkpoint_id">>,<<"2">>}, {<<"vb_749:state">>,<<"active">>}, {<<"vb_748:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_748:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, 
{<<"vb_748:checkpoint_extension">>,<<"false">>}, {<<"vb_748:num_items_for_persistence">>,<<"0">>}, {<<"vb_748:num_checkpoints">>,<<"1">>}, {<<"vb_748:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_748:num_checkpoint_items">>,<<"1">>}, {<<"vb_748:num_tap_cursors">>,<<"1">>}, {<<"vb_748:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_748:open_checkpoint_id">>,<<"2">>}, {<<"vb_748:state">>,<<"active">>}, {<<"vb_747:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_747:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_747:checkpoint_extension">>,<<"false">>}, {<<"vb_747:num_items_for_persistence">>,<<"0">>}, {<<"vb_747:num_checkpoints">>,<<"1">>}, {<<"vb_747:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_747:num_checkpoint_items">>,<<"1">>}, {<<"vb_747:num_tap_cursors">>,<<"1">>}, {<<"vb_747:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_747:open_checkpoint_id">>,<<"2">>}, {<<"vb_747:state">>,<<"active">>}, {<<"vb_746:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_746:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_746:checkpoint_extension">>,<<"false">>}, {<<"vb_746:num_items_for_persistence">>,<<"0">>}, {<<"vb_746:num_checkpoints">>,<<"1">>}, {<<"vb_746:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_746:num_checkpoint_items">>,<<"1">>}, {<<"vb_746:num_tap_cursors">>,<<"1">>}, {<<"vb_746:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_746:open_checkpoint_id">>,<<"2">>}, {<<"vb_746:state">>,<<"active">>}, {<<"vb_745:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_745:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_745:checkpoint_extension">>,<<"false">>}, {<<"vb_745:num_items_for_persistence">>,<<"0">>}, {<<"vb_745:num_checkpoints">>,<<"1">>}, {<<"vb_745:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_745:num_checkpoint_items">>,<<"1">>}, {<<"vb_745:num_tap_cursors">>,<<"1">>}, {<<"vb_745:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_745:open_checkpoint_id">>,<<"2">>}, {<<"vb_745:state">>,<<"active">>}, {<<"vb_744:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_744:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_744:checkpoint_extension">>,<<"false">>}, {<<"vb_744:num_items_for_persistence">>,<<"0">>}, {<<"vb_744:num_checkpoints">>,<<"1">>}, {<<"vb_744:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_744:num_checkpoint_items">>,<<"1">>}, {<<"vb_744:num_tap_cursors">>,<<"1">>}, {<<"vb_744:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_744:open_checkpoint_id">>,<<"2">>}, {<<"vb_744:state">>,<<"active">>}, {<<"vb_743:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_743:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_743:checkpoint_extension">>,<<"false">>}, {<<"vb_743:num_items_for_persistence">>,<<"0">>}, {<<"vb_743:num_checkpoints">>,<<"1">>}, {<<"vb_743:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_743:num_checkpoint_items">>,<<"1">>}, {<<"vb_743:num_tap_cursors">>,<<"1">>}, {<<"vb_743:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_743:open_checkpoint_id">>,<<"2">>}, {<<"vb_743:state">>,<<"active">>}, {<<"vb_742:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_742:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_742:checkpoint_extension">>,<<"false">>}, {<<"vb_742:num_items_for_persistence">>,<<"0">>}, {<<"vb_742:num_checkpoints">>,<<"1">>}, {<<"vb_742:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_742:num_checkpoint_items">>,<<"1">>}, {<<"vb_742:num_tap_cursors">>,<<"1">>}, {<<"vb_742:last_closed_checkpoint_id">>,<<"1">>}, 
{<<"vb_742:open_checkpoint_id">>,<<"2">>}, {<<"vb_742:state">>,<<"active">>}, {<<"vb_741:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_741:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_741:checkpoint_extension">>,<<"false">>}, {<<"vb_741:num_items_for_persistence">>,<<"0">>}, {<<"vb_741:num_checkpoints">>,<<"1">>}, {<<"vb_741:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_741:num_checkpoint_items">>,<<"1">>}, {<<"vb_741:num_tap_cursors">>,<<"1">>}, {<<"vb_741:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_741:open_checkpoint_id">>,<<"2">>}, {<<"vb_741:state">>,<<"active">>}, {<<"vb_740:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_740:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_740:checkpoint_extension">>,<<"false">>}, {<<"vb_740:num_items_for_persistence">>,<<"0">>}, {<<"vb_740:num_checkpoints">>,<<"1">>}, {<<"vb_740:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_740:num_checkpoint_items">>,<<"1">>}, {<<"vb_740:num_tap_cursors">>,<<"1">>}, {<<"vb_740:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_740:open_checkpoint_id">>,<<"2">>}, {<<"vb_740:state">>,<<"active">>}, {<<"vb_739:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_739:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_739:checkpoint_extension">>,<<"false">>}, {<<"vb_739:num_items_for_persistence">>,<<"0">>}, {<<"vb_739:num_checkpoints">>,<<"1">>}, {<<"vb_739:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_739:num_checkpoint_items">>,<<"1">>}, {<<"vb_739:num_tap_cursors">>,<<"1">>}, {<<"vb_739:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_739:open_checkpoint_id">>,<<"2">>}, {<<"vb_739:state">>,<<"active">>}, {<<"vb_738:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_738:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_738:checkpoint_extension">>,<<"false">>}, {<<"vb_738:num_items_for_persistence">>,<<"0">>}, {<<"vb_738:num_checkpoints">>,<<"1">>}, {<<"vb_738:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_738:num_checkpoint_items">>,<<"1">>}, {<<"vb_738:num_tap_cursors">>,<<"1">>}, {<<"vb_738:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_738:open_checkpoint_id">>,<<"2">>}, {<<"vb_738:state">>,<<"active">>}, {<<"vb_737:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_737:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_737:checkpoint_extension">>,<<"false">>}, {<<"vb_737:num_items_for_persistence">>,<<"0">>}, {<<"vb_737:num_checkpoints">>,<<"1">>}, {<<"vb_737:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_737:num_checkpoint_items">>,<<"1">>}, {<<"vb_737:num_tap_cursors">>,<<"1">>}, {<<"vb_737:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_737:open_checkpoint_id">>,<<"2">>}, {<<"vb_737:state">>,<<"active">>}, {<<"vb_736:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_736:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_736:checkpoint_extension">>,<<"false">>}, {<<"vb_736:num_items_for_persistence">>,<<"0">>}, {<<"vb_736:num_checkpoints">>,<<"1">>}, {<<"vb_736:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_736:num_checkpoint_items">>,<<"1">>}, {<<"vb_736:num_tap_cursors">>,<<"1">>}, {<<"vb_736:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_736:open_checkpoint_id">>,<<"2">>}, {<<"vb_736:state">>,<<"active">>}, {<<"vb_735:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_735:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_735:checkpoint_extension">>,<<"false">>}, {<<"vb_735:num_items_for_persistence">>,<<"0">>}, 
{<<"vb_735:num_checkpoints">>,<<"1">>}, {<<"vb_735:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_735:num_checkpoint_items">>,<<"1">>}, {<<"vb_735:num_tap_cursors">>,<<"1">>}, {<<"vb_735:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_735:open_checkpoint_id">>,<<"2">>}, {<<"vb_735:state">>,<<"active">>}, {<<"vb_734:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_734:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_734:checkpoint_extension">>,<<"false">>}, {<<"vb_734:num_items_for_persistence">>,<<"0">>}, {<<"vb_734:num_checkpoints">>,<<"1">>}, {<<"vb_734:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_734:num_checkpoint_items">>,<<"1">>}, {<<"vb_734:num_tap_cursors">>,<<"1">>}, {<<"vb_734:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_734:open_checkpoint_id">>,<<"2">>}, {<<"vb_734:state">>,<<"active">>}, {<<"vb_733:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_733:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_733:checkpoint_extension">>,<<"false">>}, {<<"vb_733:num_items_for_persistence">>,<<"0">>}, {<<"vb_733:num_checkpoints">>,<<"1">>}, {<<"vb_733:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_733:num_checkpoint_items">>,<<"1">>}, {<<"vb_733:num_tap_cursors">>,<<"1">>}, {<<"vb_733:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_733:open_checkpoint_id">>,<<"2">>}, {<<"vb_733:state">>,<<"active">>}, {<<"vb_732:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_732:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_732:checkpoint_extension">>,<<"false">>}, {<<"vb_732:num_items_for_persistence">>,<<"0">>}, {<<"vb_732:num_checkpoints">>,<<"1">>}, {<<"vb_732:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_732:num_checkpoint_items">>,<<"1">>}, {<<"vb_732:num_tap_cursors">>,<<"1">>}, {<<"vb_732:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_732:open_checkpoint_id">>,<<"2">>}, {<<"vb_732:state">>,<<"active">>}, {<<"vb_731:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_731:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_731:checkpoint_extension">>,<<"false">>}, {<<"vb_731:num_items_for_persistence">>,<<"0">>}, {<<"vb_731:num_checkpoints">>,<<"1">>}, {<<"vb_731:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_731:num_checkpoint_items">>,<<"1">>}, {<<"vb_731:num_tap_cursors">>,<<"1">>}, {<<"vb_731:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_731:open_checkpoint_id">>,<<"2">>}, {<<"vb_731:state">>,<<"active">>}, {<<"vb_730:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_730:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_730:checkpoint_extension">>,<<"false">>}, {<<"vb_730:num_items_for_persistence">>,<<"0">>}, {<<"vb_730:num_checkpoints">>,<<"1">>}, {<<"vb_730:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_730:num_checkpoint_items">>,<<"1">>}, {<<"vb_730:num_tap_cursors">>,<<"1">>}, {<<"vb_730:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_730:open_checkpoint_id">>,<<"2">>}, {<<"vb_730:state">>,<<"active">>}, {<<"vb_729:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_729:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_729:checkpoint_extension">>,<<"false">>}, {<<"vb_729:num_items_for_persistence">>,<<"0">>}, {<<"vb_729:num_checkpoints">>,<<"1">>}, {<<"vb_729:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_729:num_checkpoint_items">>,<<"1">>}, {<<"vb_729:num_tap_cursors">>,<<"1">>}, {<<"vb_729:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_729:open_checkpoint_id">>,<<"2">>}, {<<"vb_729:state">>,<<"active">>}, 
{<<"vb_728:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_728:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_728:checkpoint_extension">>,<<"false">>}, {<<"vb_728:num_items_for_persistence">>,<<"0">>}, {<<"vb_728:num_checkpoints">>,<<"1">>}, {<<"vb_728:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_728:num_checkpoint_items">>,<<"1">>}, {<<"vb_728:num_tap_cursors">>,<<"1">>}, {<<"vb_728:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_728:open_checkpoint_id">>,<<"2">>}, {<<"vb_728:state">>,<<"active">>}, {<<"vb_727:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_727:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_727:checkpoint_extension">>,<<"false">>}, {<<"vb_727:num_items_for_persistence">>,<<"0">>}, {<<"vb_727:num_checkpoints">>,<<"1">>}, {<<"vb_727:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_727:num_checkpoint_items">>,<<"1">>}, {<<"vb_727:num_tap_cursors">>,<<"1">>}, {<<"vb_727:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_727:open_checkpoint_id">>,<<"2">>}, {<<"vb_727:state">>,<<"active">>}, {<<"vb_726:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_726:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_726:checkpoint_extension">>,<<"false">>}, {<<"vb_726:num_items_for_persistence">>,<<"0">>}, {<<"vb_726:num_checkpoints">>,<<"1">>}, {<<"vb_726:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_726:num_checkpoint_items">>,<<"1">>}, {<<"vb_726:num_tap_cursors">>,<<"1">>}, {<<"vb_726:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_726:open_checkpoint_id">>,<<"2">>}, {<<"vb_726:state">>,<<"active">>}, {<<"vb_725:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_725:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_725:checkpoint_extension">>,<<"false">>}, {<<"vb_725:num_items_for_persistence">>,<<"0">>}, {<<"vb_725:num_checkpoints">>,<<"1">>}, {<<"vb_725:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_725:num_checkpoint_items">>,<<"1">>}, {<<"vb_725:num_tap_cursors">>,<<"1">>}, {<<"vb_725:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_725:open_checkpoint_id">>,<<"2">>}, {<<"vb_725:state">>,<<"active">>}, {<<"vb_724:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_724:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_724:checkpoint_extension">>,<<"false">>}, {<<"vb_724:num_items_for_persistence">>,<<"0">>}, {<<"vb_724:num_checkpoints">>,<<"1">>}, {<<"vb_724:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_724:num_checkpoint_items">>,<<"1">>}, {<<"vb_724:num_tap_cursors">>,<<"1">>}, {<<"vb_724:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_724:open_checkpoint_id">>,<<"2">>}, {<<"vb_724:state">>,<<"active">>}, {<<"vb_723:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_723:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_723:checkpoint_extension">>,<<"false">>}, {<<"vb_723:num_items_for_persistence">>,<<"0">>}, {<<"vb_723:num_checkpoints">>,<<"1">>}, {<<"vb_723:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_723:num_checkpoint_items">>,<<"1">>}, {<<"vb_723:num_tap_cursors">>,<<"1">>}, {<<"vb_723:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_723:open_checkpoint_id">>,<<"2">>}, {<<"vb_723:state">>,<<"active">>}, {<<"vb_722:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_722:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_722:checkpoint_extension">>,<<"false">>}, {<<"vb_722:num_items_for_persistence">>,<<"0">>}, {<<"vb_722:num_checkpoints">>,<<"1">>}, {<<"vb_722:num_open_checkpoint_items">>,<<"0">>}, 
{<<"vb_722:num_checkpoint_items">>,<<"1">>}, {<<"vb_722:num_tap_cursors">>,<<"1">>}, {<<"vb_722:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_722:open_checkpoint_id">>,<<"2">>}, {<<"vb_722:state">>,<<"active">>}, {<<"vb_721:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_721:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_721:checkpoint_extension">>,<<"false">>}, {<<"vb_721:num_items_for_persistence">>,<<"0">>}, {<<"vb_721:num_checkpoints">>,<<"1">>}, {<<"vb_721:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_721:num_checkpoint_items">>,<<"1">>}, {<<"vb_721:num_tap_cursors">>,<<"1">>}, {<<"vb_721:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_721:open_checkpoint_id">>,<<"2">>}, {<<"vb_721:state">>,<<"active">>}, {<<"vb_720:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_720:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_720:checkpoint_extension">>,<<"false">>}, {<<"vb_720:num_items_for_persistence">>,<<"0">>}, {<<"vb_720:num_checkpoints">>,<<"1">>}, {<<"vb_720:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_720:num_checkpoint_items">>,<<"1">>}, {<<"vb_720:num_tap_cursors">>,<<"1">>}, {<<"vb_720:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_720:open_checkpoint_id">>,<<"2">>}, {<<"vb_720:state">>,<<"active">>}, {<<"vb_719:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_719:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_719:checkpoint_extension">>,<<"false">>}, {<<"vb_719:num_items_for_persistence">>,<<"0">>}, {<<"vb_719:num_checkpoints">>,<<"1">>}, {<<"vb_719:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_719:num_checkpoint_items">>,<<"1">>}, {<<"vb_719:num_tap_cursors">>,<<"1">>}, {<<"vb_719:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_719:open_checkpoint_id">>,<<"2">>}, {<<"vb_719:state">>,<<"active">>}, {<<"vb_718:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_718:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_718:checkpoint_extension">>,<<"false">>}, {<<"vb_718:num_items_for_persistence">>,<<"0">>}, {<<"vb_718:num_checkpoints">>,<<"1">>}, {<<"vb_718:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_718:num_checkpoint_items">>,<<"1">>}, {<<"vb_718:num_tap_cursors">>,<<"1">>}, {<<"vb_718:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_718:open_checkpoint_id">>,<<"2">>}, {<<"vb_718:state">>,<<"active">>}, {<<"vb_717:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_717:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_717:checkpoint_extension">>,<<"false">>}, {<<"vb_717:num_items_for_persistence">>,<<"0">>}, {<<"vb_717:num_checkpoints">>,<<"1">>}, {<<"vb_717:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_717:num_checkpoint_items">>,<<"1">>}, {<<"vb_717:num_tap_cursors">>,<<"1">>}, {<<"vb_717:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_717:open_checkpoint_id">>,<<"2">>}, {<<"vb_717:state">>,<<"active">>}, {<<"vb_716:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_716:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_716:checkpoint_extension">>,<<"false">>}, {<<"vb_716:num_items_for_persistence">>,<<"0">>}, {<<"vb_716:num_checkpoints">>,<<"1">>}, {<<"vb_716:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_716:num_checkpoint_items">>,<<"1">>}, {<<"vb_716:num_tap_cursors">>,<<"1">>}, {<<"vb_716:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_716:open_checkpoint_id">>,<<"2">>}, {<<"vb_716:state">>,<<"active">>}, {<<"vb_715:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_715:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, 
<<"2">>}, {<<"vb_715:checkpoint_extension">>,<<"false">>}, {<<"vb_715:num_items_for_persistence">>,<<"0">>}, {<<"vb_715:num_checkpoints">>,<<"1">>}, {<<"vb_715:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_715:num_checkpoint_items">>,<<"1">>}, {<<"vb_715:num_tap_cursors">>,<<"1">>}, {<<"vb_715:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_715:open_checkpoint_id">>,<<"2">>}, {<<"vb_715:state">>,<<"active">>}, {<<"vb_714:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_714:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_714:checkpoint_extension">>,<<"false">>}, {<<"vb_714:num_items_for_persistence">>,<<"0">>}, {<<"vb_714:num_checkpoints">>,<<"1">>}, {<<"vb_714:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_714:num_checkpoint_items">>,<<"1">>}, {<<"vb_714:num_tap_cursors">>,<<"1">>}, {<<"vb_714:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_714:open_checkpoint_id">>,<<"2">>}, {<<"vb_714:state">>,<<"active">>}, {<<"vb_713:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_713:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_713:checkpoint_extension">>,<<"false">>}, {<<"vb_713:num_items_for_persistence">>,<<"0">>}, {<<"vb_713:num_checkpoints">>,<<"1">>}, {<<"vb_713:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_713:num_checkpoint_items">>,<<"1">>}, {<<"vb_713:num_tap_cursors">>,<<"1">>}, {<<"vb_713:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_713:open_checkpoint_id">>,<<"2">>}, {<<"vb_713:state">>,<<"active">>}, {<<"vb_712:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_712:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_712:checkpoint_extension">>,<<"false">>}, {<<"vb_712:num_items_for_persistence">>,<<"0">>}, {<<"vb_712:num_checkpoints">>,<<"1">>}, {<<"vb_712:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_712:num_checkpoint_items">>,<<"1">>}, {<<"vb_712:num_tap_cursors">>,<<"1">>}, {<<"vb_712:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_712:open_checkpoint_id">>,<<"2">>}, {<<"vb_712:state">>,<<"active">>}, {<<"vb_711:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_711:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_711:checkpoint_extension">>,<<"false">>}, {<<"vb_711:num_items_for_persistence">>,<<"0">>}, {<<"vb_711:num_checkpoints">>,<<"1">>}, {<<"vb_711:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_711:num_checkpoint_items">>,<<"1">>}, {<<"vb_711:num_tap_cursors">>,<<"1">>}, {<<"vb_711:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_711:open_checkpoint_id">>,<<"2">>}, {<<"vb_711:state">>,<<"active">>}, {<<"vb_710:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_710:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_710:checkpoint_extension">>,<<"false">>}, {<<"vb_710:num_items_for_persistence">>,<<"0">>}, {<<"vb_710:num_checkpoints">>,<<"1">>}, {<<"vb_710:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_710:num_checkpoint_items">>,<<"1">>}, {<<"vb_710:num_tap_cursors">>,<<"1">>}, {<<"vb_710:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_710:open_checkpoint_id">>,<<"2">>}, {<<"vb_710:state">>,<<"active">>}, {<<"vb_709:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_709:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_709:checkpoint_extension">>,<<"false">>}, {<<"vb_709:num_items_for_persistence">>,<<"0">>}, {<<"vb_709:num_checkpoints">>,<<"1">>}, {<<"vb_709:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_709:num_checkpoint_items">>,<<"1">>}, {<<"vb_709:num_tap_cursors">>,<<"1">>}, {<<"vb_709:last_closed_checkpoint_id">>,<<"1">>}, 
{<<"vb_709:open_checkpoint_id">>,<<"2">>}, {<<"vb_709:state">>,<<"active">>}, {<<"vb_708:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_708:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_708:checkpoint_extension">>,<<"false">>}, {<<"vb_708:num_items_for_persistence">>,<<"0">>}, {<<"vb_708:num_checkpoints">>,<<"1">>}, {<<"vb_708:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_708:num_checkpoint_items">>,<<"1">>}, {<<"vb_708:num_tap_cursors">>,<<"1">>}, {<<"vb_708:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_708:open_checkpoint_id">>,<<"2">>}, {<<"vb_708:state">>,<<"active">>}, {<<"vb_707:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_707:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_707:checkpoint_extension">>,<<"false">>}, {<<"vb_707:num_items_for_persistence">>,<<"0">>}, {<<"vb_707:num_checkpoints">>,<<"1">>}, {<<"vb_707:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_707:num_checkpoint_items">>,<<"1">>}, {<<"vb_707:num_tap_cursors">>,<<"1">>}, {<<"vb_707:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_707:open_checkpoint_id">>,<<"2">>}, {<<"vb_707:state">>,<<"active">>}, {<<"vb_706:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_706:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_706:checkpoint_extension">>,<<"false">>}, {<<"vb_706:num_items_for_persistence">>,<<"0">>}, {<<"vb_706:num_checkpoints">>,<<"1">>}, {<<"vb_706:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_706:num_checkpoint_items">>,<<"1">>}, {<<"vb_706:num_tap_cursors">>,<<"1">>}, {<<"vb_706:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_706:open_checkpoint_id">>,<<"2">>}, {<<"vb_706:state">>,<<"active">>}, {<<"vb_705:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_705:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_705:checkpoint_extension">>,<<"false">>}, {<<"vb_705:num_items_for_persistence">>,<<"0">>}, {<<"vb_705:num_checkpoints">>,<<"1">>}, {<<"vb_705:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_705:num_checkpoint_items">>,<<"1">>}, {<<"vb_705:num_tap_cursors">>,<<"1">>}, {<<"vb_705:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_705:open_checkpoint_id">>,<<"2">>}, {<<"vb_705:state">>,<<"active">>}, {<<"vb_704:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_704:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_704:checkpoint_extension">>,<<"false">>}, {<<"vb_704:num_items_for_persistence">>,<<"0">>}, {<<"vb_704:num_checkpoints">>,<<"1">>}, {<<"vb_704:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_704:num_checkpoint_items">>,<<"1">>}, {<<"vb_704:num_tap_cursors">>,<<"1">>}, {<<"vb_704:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_704:open_checkpoint_id">>,<<"2">>}, {<<"vb_704:state">>,<<"active">>}, {<<"vb_703:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_703:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_703:checkpoint_extension">>,<<"false">>}, {<<"vb_703:num_items_for_persistence">>,<<"0">>}, {<<"vb_703:num_checkpoints">>,<<"1">>}, {<<"vb_703:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_703:num_checkpoint_items">>,<<"1">>}, {<<"vb_703:num_tap_cursors">>,<<"1">>}, {<<"vb_703:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_703:open_checkpoint_id">>,<<"2">>}, {<<"vb_703:state">>,<<"active">>}, {<<"vb_702:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_702:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_702:checkpoint_extension">>,<<"false">>}, {<<"vb_702:num_items_for_persistence">>,<<"0">>}, 
{<<"vb_702:num_checkpoints">>,<<"1">>}, {<<"vb_702:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_702:num_checkpoint_items">>,<<"1">>}, {<<"vb_702:num_tap_cursors">>,<<"1">>}, {<<"vb_702:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_702:open_checkpoint_id">>,<<"2">>}, {<<"vb_702:state">>,<<"active">>}, {<<"vb_701:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_701:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_701:checkpoint_extension">>,<<"false">>}, {<<"vb_701:num_items_for_persistence">>,<<"0">>}, {<<"vb_701:num_checkpoints">>,<<"1">>}, {<<"vb_701:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_701:num_checkpoint_items">>,<<"1">>}, {<<"vb_701:num_tap_cursors">>,<<"1">>}, {<<"vb_701:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_701:open_checkpoint_id">>,<<"2">>}, {<<"vb_701:state">>,<<"active">>}, {<<"vb_700:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_700:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_700:checkpoint_extension">>,<<"false">>}, {<<"vb_700:num_items_for_persistence">>,<<"0">>}, {<<"vb_700:num_checkpoints">>,<<"1">>}, {<<"vb_700:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_700:num_checkpoint_items">>,<<"1">>}, {<<"vb_700:num_tap_cursors">>,<<"1">>}, {<<"vb_700:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_700:open_checkpoint_id">>,<<"2">>}, {<<"vb_700:state">>,<<"active">>}, {<<"vb_699:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_699:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_699:checkpoint_extension">>,<<"false">>}, {<<"vb_699:num_items_for_persistence">>,<<"0">>}, {<<"vb_699:num_checkpoints">>,<<"1">>}, {<<"vb_699:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_699:num_checkpoint_items">>,<<"1">>}, {<<"vb_699:num_tap_cursors">>,<<"1">>}, {<<"vb_699:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_699:open_checkpoint_id">>,<<"2">>}, {<<"vb_699:state">>,<<"active">>}, {<<"vb_698:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_698:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_698:checkpoint_extension">>,<<"false">>}, {<<"vb_698:num_items_for_persistence">>,<<"0">>}, {<<"vb_698:num_checkpoints">>,<<"1">>}, {<<"vb_698:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_698:num_checkpoint_items">>,<<"1">>}, {<<"vb_698:num_tap_cursors">>,<<"1">>}, {<<"vb_698:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_698:open_checkpoint_id">>,<<"2">>}, {<<"vb_698:state">>,<<"active">>}, {<<"vb_697:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_697:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_697:checkpoint_extension">>,<<"false">>}, {<<"vb_697:num_items_for_persistence">>,<<"0">>}, {<<"vb_697:num_checkpoints">>,<<"1">>}, {<<"vb_697:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_697:num_checkpoint_items">>,<<"1">>}, {<<"vb_697:num_tap_cursors">>,<<"1">>}, {<<"vb_697:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_697:open_checkpoint_id">>,<<"2">>}, {<<"vb_697:state">>,<<"active">>}, {<<"vb_696:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_696:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_696:checkpoint_extension">>,<<"false">>}, {<<"vb_696:num_items_for_persistence">>,<<"0">>}, {<<"vb_696:num_checkpoints">>,<<"1">>}, {<<"vb_696:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_696:num_checkpoint_items">>,<<"1">>}, {<<"vb_696:num_tap_cursors">>,<<"1">>}, {<<"vb_696:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_696:open_checkpoint_id">>,<<"2">>}, {<<"vb_696:state">>,<<"active">>}, 
{<<"vb_695:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_695:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_695:checkpoint_extension">>,<<"false">>}, {<<"vb_695:num_items_for_persistence">>,<<"0">>}, {<<"vb_695:num_checkpoints">>,<<"1">>}, {<<"vb_695:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_695:num_checkpoint_items">>,<<"1">>}, {<<"vb_695:num_tap_cursors">>,<<"1">>}, {<<"vb_695:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_695:open_checkpoint_id">>,<<"2">>}, {<<"vb_695:state">>,<<"active">>}, {<<"vb_694:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_694:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_694:checkpoint_extension">>,<<"false">>}, {<<"vb_694:num_items_for_persistence">>,<<"0">>}, {<<"vb_694:num_checkpoints">>,<<"1">>}, {<<"vb_694:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_694:num_checkpoint_items">>,<<"1">>}, {<<"vb_694:num_tap_cursors">>,<<"1">>}, {<<"vb_694:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_694:open_checkpoint_id">>,<<"2">>}, {<<"vb_694:state">>,<<"active">>}, {<<"vb_693:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_693:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_693:checkpoint_extension">>,<<"false">>}, {<<"vb_693:num_items_for_persistence">>,<<"0">>}, {<<"vb_693:num_checkpoints">>,<<"1">>}, {<<"vb_693:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_693:num_checkpoint_items">>,<<"1">>}, {<<"vb_693:num_tap_cursors">>,<<"1">>}, {<<"vb_693:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_693:open_checkpoint_id">>,<<"2">>}, {<<"vb_693:state">>,<<"active">>}, {<<"vb_692:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_692:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_692:checkpoint_extension">>,<<"false">>}, {<<"vb_692:num_items_for_persistence">>,<<"0">>}, {<<"vb_692:num_checkpoints">>,<<"1">>}, {<<"vb_692:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_692:num_checkpoint_items">>,<<"1">>}, {<<"vb_692:num_tap_cursors">>,<<"1">>}, {<<"vb_692:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_692:open_checkpoint_id">>,<<"2">>}, {<<"vb_692:state">>,<<"active">>}, {<<"vb_691:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_691:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_691:checkpoint_extension">>,<<"false">>}, {<<"vb_691:num_items_for_persistence">>,<<"0">>}, {<<"vb_691:num_checkpoints">>,<<"1">>}, {<<"vb_691:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_691:num_checkpoint_items">>,<<"1">>}, {<<"vb_691:num_tap_cursors">>,<<"1">>}, {<<"vb_691:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_691:open_checkpoint_id">>,<<"2">>}, {<<"vb_691:state">>,<<"active">>}, {<<"vb_690:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_690:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_690:checkpoint_extension">>,<<"false">>}, {<<"vb_690:num_items_for_persistence">>,<<"0">>}, {<<"vb_690:num_checkpoints">>,<<"1">>}, {<<"vb_690:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_690:num_checkpoint_items">>,<<"1">>}, {<<"vb_690:num_tap_cursors">>,<<"1">>}, {<<"vb_690:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_690:open_checkpoint_id">>,<<"2">>}, {<<"vb_690:state">>,<<"active">>}, {<<"vb_689:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_689:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_689:checkpoint_extension">>,<<"false">>}, {<<"vb_689:num_items_for_persistence">>,<<"0">>}, {<<"vb_689:num_checkpoints">>,<<"1">>}, {<<"vb_689:num_open_checkpoint_items">>,<<"0">>}, 
{<<"vb_689:num_checkpoint_items">>,<<"1">>}, {<<"vb_689:num_tap_cursors">>,<<"1">>}, {<<"vb_689:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_689:open_checkpoint_id">>,<<"2">>}, {<<"vb_689:state">>,<<"active">>}, {<<"vb_688:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_688:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_688:checkpoint_extension">>,<<"false">>}, {<<"vb_688:num_items_for_persistence">>,<<"0">>}, {<<"vb_688:num_checkpoints">>,<<"1">>}, {<<"vb_688:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_688:num_checkpoint_items">>,<<"1">>}, {<<"vb_688:num_tap_cursors">>,<<"1">>}, {<<"vb_688:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_688:open_checkpoint_id">>,<<"2">>}, {<<"vb_688:state">>,<<"active">>}, {<<"vb_687:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_687:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_687:checkpoint_extension">>,<<"false">>}, {<<"vb_687:num_items_for_persistence">>,<<"0">>}, {<<"vb_687:num_checkpoints">>,<<"1">>}, {<<"vb_687:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_687:num_checkpoint_items">>,<<"1">>}, {<<"vb_687:num_tap_cursors">>,<<"1">>}, {<<"vb_687:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_687:open_checkpoint_id">>,<<"2">>}, {<<"vb_687:state">>,<<"active">>}, {<<"vb_686:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_686:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_686:checkpoint_extension">>,<<"false">>}, {<<"vb_686:num_items_for_persistence">>,<<"0">>}, {<<"vb_686:num_checkpoints">>,<<"1">>}, {<<"vb_686:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_686:num_checkpoint_items">>,<<"1">>}, {<<"vb_686:num_tap_cursors">>,<<"1">>}, {<<"vb_686:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_686:open_checkpoint_id">>,<<"2">>}, {<<"vb_686:state">>,<<"active">>}, {<<"vb_685:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_685:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_685:checkpoint_extension">>,<<"false">>}, {<<"vb_685:num_items_for_persistence">>,<<"0">>}, {<<"vb_685:num_checkpoints">>,<<"1">>}, {<<"vb_685:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_685:num_checkpoint_items">>,<<"1">>}, {<<"vb_685:num_tap_cursors">>,<<"1">>}, {<<"vb_685:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_685:open_checkpoint_id">>,<<"2">>}, {<<"vb_685:state">>,<<"active">>}, {<<"vb_684:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_684:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_684:checkpoint_extension">>,<<"false">>}, {<<"vb_684:num_items_for_persistence">>,<<"0">>}, {<<"vb_684:num_checkpoints">>,<<"1">>}, {<<"vb_684:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_684:num_checkpoint_items">>,<<"1">>}, {<<"vb_684:num_tap_cursors">>,<<"1">>}, {<<"vb_684:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_684:open_checkpoint_id">>,<<"2">>}, {<<"vb_684:state">>,<<"active">>}, {<<"vb_683:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_683:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_683:checkpoint_extension">>,<<"false">>}, {<<"vb_683:num_items_for_persistence">>,<<"0">>}, {<<"vb_683:num_checkpoints">>,<<"1">>}, {<<"vb_683:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_683:num_checkpoint_items">>,<<"1">>}, {<<"vb_683:num_tap_cursors">>,<<"1">>}, {<<"vb_683:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_683:open_checkpoint_id">>,<<"2">>}, {<<"vb_683:state">>,<<"active">>}, {<<"vb_682:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_682:eq_tapq:replication_ns_1@10.242.238.91:cursor_checkpoint_id">>, 
<<"2">>}, {<<"vb_682:checkpoint_extension">>,<<"false">>}, {<<"vb_682:num_items_for_persistence">>,<<"0">>}, {<<"vb_682:num_checkpoints">>,<<"1">>}, {<<"vb_682:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_682:num_checkpoint_items">>,<<"1">>}, {<<"vb_682:num_tap_cursors">>,<<"1">>}, {<<"vb_682:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_682:open_checkpoint_id">>,<<"2">>}, {<<"vb_682:state">>,<<"active">>}, {<<"vb_681:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_681:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_681:checkpoint_extension">>,<<"false">>}, {<<"vb_681:num_items_for_persistence">>,<<"0">>}, {<<"vb_681:num_checkpoints">>,<<"1">>}, {<<"vb_681:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_681:num_checkpoint_items">>,<<"1">>}, {<<"vb_681:num_tap_cursors">>,<<"1">>}, {<<"vb_681:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_681:open_checkpoint_id">>,<<"2">>}, {<<"vb_681:state">>,<<"active">>}, {<<"vb_680:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_680:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_680:checkpoint_extension">>,<<"false">>}, {<<"vb_680:num_items_for_persistence">>,<<"0">>}, {<<"vb_680:num_checkpoints">>,<<"1">>}, {<<"vb_680:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_680:num_checkpoint_items">>,<<"1">>}, {<<"vb_680:num_tap_cursors">>,<<"1">>}, {<<"vb_680:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_680:open_checkpoint_id">>,<<"2">>}, {<<"vb_680:state">>,<<"active">>}, {<<"vb_679:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_679:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_679:checkpoint_extension">>,<<"false">>}, {<<"vb_679:num_items_for_persistence">>,<<"0">>}, {<<"vb_679:num_checkpoints">>,<<"1">>}, {<<"vb_679:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_679:num_checkpoint_items">>,<<"1">>}, {<<"vb_679:num_tap_cursors">>,<<"1">>}, {<<"vb_679:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_679:open_checkpoint_id">>,<<"2">>}, {<<"vb_679:state">>,<<"active">>}, {<<"vb_678:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_678:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_678:checkpoint_extension">>,<<"false">>}, {<<"vb_678:num_items_for_persistence">>,<<"0">>}, {<<"vb_678:num_checkpoints">>,<<"1">>}, {<<"vb_678:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_678:num_checkpoint_items">>,<<"1">>}, {<<"vb_678:num_tap_cursors">>,<<"1">>}, {<<"vb_678:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_678:open_checkpoint_id">>,<<"2">>}, {<<"vb_678:state">>,<<"active">>}, {<<"vb_677:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_677:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_677:checkpoint_extension">>,<<"false">>}, {<<"vb_677:num_items_for_persistence">>,<<"0">>}, {<<"vb_677:num_checkpoints">>,<<"1">>}, {<<"vb_677:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_677:num_checkpoint_items">>,<<"1">>}, {<<"vb_677:num_tap_cursors">>,<<"1">>}, {<<"vb_677:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_677:open_checkpoint_id">>,<<"2">>}, {<<"vb_677:state">>,<<"active">>}, {<<"vb_676:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_676:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_676:checkpoint_extension">>,<<"false">>}, {<<"vb_676:num_items_for_persistence">>,<<"0">>}, {<<"vb_676:num_checkpoints">>,<<"1">>}, {<<"vb_676:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_676:num_checkpoint_items">>,<<"1">>}, {<<"vb_676:num_tap_cursors">>,<<"1">>}, {<<"vb_676:last_closed_checkpoint_id">>,<<"1">>}, 
{<<"vb_676:open_checkpoint_id">>,<<"2">>}, {<<"vb_676:state">>,<<"active">>}, {<<"vb_675:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_675:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_675:checkpoint_extension">>,<<"false">>}, {<<"vb_675:num_items_for_persistence">>,<<"0">>}, {<<"vb_675:num_checkpoints">>,<<"1">>}, {<<"vb_675:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_675:num_checkpoint_items">>,<<"1">>}, {<<"vb_675:num_tap_cursors">>,<<"1">>}, {<<"vb_675:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_675:open_checkpoint_id">>,<<"2">>}, {<<"vb_675:state">>,<<"active">>}, {<<"vb_674:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_674:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_674:checkpoint_extension">>,<<"false">>}, {<<"vb_674:num_items_for_persistence">>,<<"0">>}, {<<"vb_674:num_checkpoints">>,<<"1">>}, {<<"vb_674:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_674:num_checkpoint_items">>,<<"1">>}, {<<"vb_674:num_tap_cursors">>,<<"1">>}, {<<"vb_674:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_674:open_checkpoint_id">>,<<"2">>}, {<<"vb_674:state">>,<<"active">>}, {<<"vb_673:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_673:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_673:checkpoint_extension">>,<<"false">>}, {<<"vb_673:num_items_for_persistence">>,<<"0">>}, {<<"vb_673:num_checkpoints">>,<<"1">>}, {<<"vb_673:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_673:num_checkpoint_items">>,<<"1">>}, {<<"vb_673:num_tap_cursors">>,<<"1">>}, {<<"vb_673:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_673:open_checkpoint_id">>,<<"2">>}, {<<"vb_673:state">>,<<"active">>}, {<<"vb_672:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_672:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_672:checkpoint_extension">>,<<"false">>}, {<<"vb_672:num_items_for_persistence">>,<<"0">>}, {<<"vb_672:num_checkpoints">>,<<"1">>}, {<<"vb_672:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_672:num_checkpoint_items">>,<<"1">>}, {<<"vb_672:num_tap_cursors">>,<<"1">>}, {<<"vb_672:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_672:open_checkpoint_id">>,<<"2">>}, {<<"vb_672:state">>,<<"active">>}, {<<"vb_671:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_671:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_671:checkpoint_extension">>,<<"false">>}, {<<"vb_671:num_items_for_persistence">>,<<"0">>}, {<<"vb_671:num_checkpoints">>,<<"1">>}, {<<"vb_671:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_671:num_checkpoint_items">>,<<"1">>}, {<<"vb_671:num_tap_cursors">>,<<"1">>}, {<<"vb_671:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_671:open_checkpoint_id">>,<<"2">>}, {<<"vb_671:state">>,<<"active">>}, {<<"vb_670:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_670:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_670:checkpoint_extension">>,<<"false">>}, {<<"vb_670:num_items_for_persistence">>,<<"0">>}, {<<"vb_670:num_checkpoints">>,<<"1">>}, {<<"vb_670:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_670:num_checkpoint_items">>,<<"1">>}, {<<"vb_670:num_tap_cursors">>,<<"1">>}, {<<"vb_670:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_670:open_checkpoint_id">>,<<"2">>}, {<<"vb_670:state">>,<<"active">>}, {<<"vb_669:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_669:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_669:checkpoint_extension">>,<<"false">>}, {<<"vb_669:num_items_for_persistence">>,<<"0">>}, 
{<<"vb_669:num_checkpoints">>,<<"1">>}, {<<"vb_669:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_669:num_checkpoint_items">>,<<"1">>}, {<<"vb_669:num_tap_cursors">>,<<"1">>}, {<<"vb_669:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_669:open_checkpoint_id">>,<<"2">>}, {<<"vb_669:state">>,<<"active">>}, {<<"vb_668:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_668:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_668:checkpoint_extension">>,<<"false">>}, {<<"vb_668:num_items_for_persistence">>,<<"0">>}, {<<"vb_668:num_checkpoints">>,<<"1">>}, {<<"vb_668:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_668:num_checkpoint_items">>,<<"1">>}, {<<"vb_668:num_tap_cursors">>,<<"1">>}, {<<"vb_668:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_668:open_checkpoint_id">>,<<"2">>}, {<<"vb_668:state">>,<<"active">>}, {<<"vb_667:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_667:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_667:checkpoint_extension">>,<<"false">>}, {<<"vb_667:num_items_for_persistence">>,<<"0">>}, {<<"vb_667:num_checkpoints">>,<<"1">>}, {<<"vb_667:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_667:num_checkpoint_items">>,<<"1">>}, {<<"vb_667:num_tap_cursors">>,<<"1">>}, {<<"vb_667:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_667:open_checkpoint_id">>,<<"2">>}, {<<"vb_667:state">>,<<"active">>}, {<<"vb_666:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_666:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_666:checkpoint_extension">>,<<"false">>}, {<<"vb_666:num_items_for_persistence">>,<<"0">>}, {<<"vb_666:num_checkpoints">>,<<"1">>}, {<<"vb_666:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_666:num_checkpoint_items">>,<<"1">>}, {<<"vb_666:num_tap_cursors">>,<<"1">>}, {<<"vb_666:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_666:open_checkpoint_id">>,<<"2">>}, {<<"vb_666:state">>,<<"active">>}, {<<"vb_665:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_665:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_665:checkpoint_extension">>,<<"false">>}, {<<"vb_665:num_items_for_persistence">>,<<"0">>}, {<<"vb_665:num_checkpoints">>,<<"1">>}, {<<"vb_665:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_665:num_checkpoint_items">>,<<"1">>}, {<<"vb_665:num_tap_cursors">>,<<"1">>}, {<<"vb_665:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_665:open_checkpoint_id">>,<<"2">>}, {<<"vb_665:state">>,<<"active">>}, {<<"vb_664:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_664:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_664:checkpoint_extension">>,<<"false">>}, {<<"vb_664:num_items_for_persistence">>,<<"0">>}, {<<"vb_664:num_checkpoints">>,<<"1">>}, {<<"vb_664:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_664:num_checkpoint_items">>,<<"1">>}, {<<"vb_664:num_tap_cursors">>,<<"1">>}, {<<"vb_664:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_664:open_checkpoint_id">>,<<"2">>}, {<<"vb_664:state">>,<<"active">>}, {<<"vb_663:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_663:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_663:checkpoint_extension">>,<<"false">>}, {<<"vb_663:num_items_for_persistence">>,<<"0">>}, {<<"vb_663:num_checkpoints">>,<<"1">>}, {<<"vb_663:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_663:num_checkpoint_items">>,<<"1">>}, {<<"vb_663:num_tap_cursors">>,<<"1">>}, {<<"vb_663:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_663:open_checkpoint_id">>,<<"2">>}, {<<"vb_663:state">>,<<"active">>}, 
{<<"vb_662:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_662:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_662:checkpoint_extension">>,<<"false">>}, {<<"vb_662:num_items_for_persistence">>,<<"0">>}, {<<"vb_662:num_checkpoints">>,<<"1">>}, {<<"vb_662:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_662:num_checkpoint_items">>,<<"1">>}, {<<"vb_662:num_tap_cursors">>,<<"1">>}, {<<"vb_662:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_662:open_checkpoint_id">>,<<"2">>}, {<<"vb_662:state">>,<<"active">>}, {<<"vb_661:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_661:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_661:checkpoint_extension">>,<<"false">>}, {<<"vb_661:num_items_for_persistence">>,<<"0">>}, {<<"vb_661:num_checkpoints">>,<<"1">>}, {<<"vb_661:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_661:num_checkpoint_items">>,<<"1">>}, {<<"vb_661:num_tap_cursors">>,<<"1">>}, {<<"vb_661:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_661:open_checkpoint_id">>,<<"2">>}, {<<"vb_661:state">>,<<"active">>}, {<<"vb_660:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_660:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_660:checkpoint_extension">>,<<"false">>}, {<<"vb_660:num_items_for_persistence">>,<<"0">>}, {<<"vb_660:num_checkpoints">>,<<"1">>}, {<<"vb_660:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_660:num_checkpoint_items">>,<<"1">>}, {<<"vb_660:num_tap_cursors">>,<<"1">>}, {<<"vb_660:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_660:open_checkpoint_id">>,<<"2">>}, {<<"vb_660:state">>,<<"active">>}, {<<"vb_659:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_659:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_659:checkpoint_extension">>,<<"false">>}, {<<"vb_659:num_items_for_persistence">>,<<"0">>}, {<<"vb_659:num_checkpoints">>,<<"1">>}, {<<"vb_659:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_659:num_checkpoint_items">>,<<"1">>}, {<<"vb_659:num_tap_cursors">>,<<"1">>}, {<<"vb_659:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_659:open_checkpoint_id">>,<<"2">>}, {<<"vb_659:state">>,<<"active">>}, {<<"vb_658:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_658:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_658:checkpoint_extension">>,<<"false">>}, {<<"vb_658:num_items_for_persistence">>,<<"0">>}, {<<"vb_658:num_checkpoints">>,<<"1">>}, {<<"vb_658:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_658:num_checkpoint_items">>,<<"1">>}, {<<"vb_658:num_tap_cursors">>,<<"1">>}, {<<"vb_658:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_658:open_checkpoint_id">>,<<"2">>}, {<<"vb_658:state">>,<<"active">>}, {<<"vb_657:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_657:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_657:checkpoint_extension">>,<<"false">>}, {<<"vb_657:num_items_for_persistence">>,<<"0">>}, {<<"vb_657:num_checkpoints">>,<<"1">>}, {<<"vb_657:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_657:num_checkpoint_items">>,<<"1">>}, {<<"vb_657:num_tap_cursors">>,<<"1">>}, {<<"vb_657:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_657:open_checkpoint_id">>,<<"2">>}, {<<"vb_657:state">>,<<"active">>}, {<<"vb_656:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_656:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_656:checkpoint_extension">>,<<"false">>}, {<<"vb_656:num_items_for_persistence">>,<<"0">>}, {<<"vb_656:num_checkpoints">>,<<"1">>}, {<<"vb_656:num_open_checkpoint_items">>,<<"0">>}, 
{<<"vb_656:num_checkpoint_items">>,<<"1">>}, {<<"vb_656:num_tap_cursors">>,<<"1">>}, {<<"vb_656:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_656:open_checkpoint_id">>,<<"2">>}, {<<"vb_656:state">>,<<"active">>}, {<<"vb_655:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_655:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_655:checkpoint_extension">>,<<"false">>}, {<<"vb_655:num_items_for_persistence">>,<<"0">>}, {<<"vb_655:num_checkpoints">>,<<"1">>}, {<<"vb_655:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_655:num_checkpoint_items">>,<<"1">>}, {<<"vb_655:num_tap_cursors">>,<<"1">>}, {<<"vb_655:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_655:open_checkpoint_id">>,<<"2">>}, {<<"vb_655:state">>,<<"active">>}, {<<"vb_654:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_654:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_654:checkpoint_extension">>,<<"false">>}, {<<"vb_654:num_items_for_persistence">>,<<"0">>}, {<<"vb_654:num_checkpoints">>,<<"1">>}, {<<"vb_654:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_654:num_checkpoint_items">>,<<"1">>}, {<<"vb_654:num_tap_cursors">>,<<"1">>}, {<<"vb_654:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_654:open_checkpoint_id">>,<<"2">>}, {<<"vb_654:state">>,<<"active">>}, {<<"vb_653:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_653:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_653:checkpoint_extension">>,<<"false">>}, {<<"vb_653:num_items_for_persistence">>,<<"0">>}, {<<"vb_653:num_checkpoints">>,<<"1">>}, {<<"vb_653:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_653:num_checkpoint_items">>,<<"1">>}, {<<"vb_653:num_tap_cursors">>,<<"1">>}, {<<"vb_653:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_653:open_checkpoint_id">>,<<"2">>}, {<<"vb_653:state">>,<<"active">>}, {<<"vb_652:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_652:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_652:checkpoint_extension">>,<<"false">>}, {<<"vb_652:num_items_for_persistence">>,<<"0">>}, {<<"vb_652:num_checkpoints">>,<<"1">>}, {<<"vb_652:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_652:num_checkpoint_items">>,<<"1">>}, {<<"vb_652:num_tap_cursors">>,<<"1">>}, {<<"vb_652:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_652:open_checkpoint_id">>,<<"2">>}, {<<"vb_652:state">>,<<"active">>}, {<<"vb_651:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_651:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_651:checkpoint_extension">>,<<"false">>}, {<<"vb_651:num_items_for_persistence">>,<<"0">>}, {<<"vb_651:num_checkpoints">>,<<"1">>}, {<<"vb_651:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_651:num_checkpoint_items">>,<<"1">>}, {<<"vb_651:num_tap_cursors">>,<<"1">>}, {<<"vb_651:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_651:open_checkpoint_id">>,<<"2">>}, {<<"vb_651:state">>,<<"active">>}, {<<"vb_650:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_650:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_650:checkpoint_extension">>,<<"false">>}, {<<"vb_650:num_items_for_persistence">>,<<"0">>}, {<<"vb_650:num_checkpoints">>,<<"1">>}, {<<"vb_650:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_650:num_checkpoint_items">>,<<"1">>}, {<<"vb_650:num_tap_cursors">>,<<"1">>}, {<<"vb_650:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_650:open_checkpoint_id">>,<<"2">>}, {<<"vb_650:state">>,<<"active">>}, {<<"vb_649:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_649:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, 
<<"2">>}, {<<"vb_649:checkpoint_extension">>,<<"false">>}, {<<"vb_649:num_items_for_persistence">>,<<"0">>}, {<<"vb_649:num_checkpoints">>,<<"1">>}, {<<"vb_649:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_649:num_checkpoint_items">>,<<"1">>}, {<<"vb_649:num_tap_cursors">>,<<"1">>}, {<<"vb_649:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_649:open_checkpoint_id">>,<<"2">>}, {<<"vb_649:state">>,<<"active">>}, {<<"vb_648:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_648:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_648:checkpoint_extension">>,<<"false">>}, {<<"vb_648:num_items_for_persistence">>,<<"0">>}, {<<"vb_648:num_checkpoints">>,<<"1">>}, {<<"vb_648:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_648:num_checkpoint_items">>,<<"1">>}, {<<"vb_648:num_tap_cursors">>,<<"1">>}, {<<"vb_648:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_648:open_checkpoint_id">>,<<"2">>}, {<<"vb_648:state">>,<<"active">>}, {<<"vb_647:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_647:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_647:checkpoint_extension">>,<<"false">>}, {<<"vb_647:num_items_for_persistence">>,<<"0">>}, {<<"vb_647:num_checkpoints">>,<<"1">>}, {<<"vb_647:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_647:num_checkpoint_items">>,<<"1">>}, {<<"vb_647:num_tap_cursors">>,<<"1">>}, {<<"vb_647:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_647:open_checkpoint_id">>,<<"2">>}, {<<"vb_647:state">>,<<"active">>}, {<<"vb_646:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_646:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_646:checkpoint_extension">>,<<"false">>}, {<<"vb_646:num_items_for_persistence">>,<<"0">>}, {<<"vb_646:num_checkpoints">>,<<"1">>}, {<<"vb_646:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_646:num_checkpoint_items">>,<<"1">>}, {<<"vb_646:num_tap_cursors">>,<<"1">>}, {<<"vb_646:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_646:open_checkpoint_id">>,<<"2">>}, {<<"vb_646:state">>,<<"active">>}, {<<"vb_645:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_645:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_645:checkpoint_extension">>,<<"false">>}, {<<"vb_645:num_items_for_persistence">>,<<"0">>}, {<<"vb_645:num_checkpoints">>,<<"1">>}, {<<"vb_645:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_645:num_checkpoint_items">>,<<"1">>}, {<<"vb_645:num_tap_cursors">>,<<"1">>}, {<<"vb_645:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_645:open_checkpoint_id">>,<<"2">>}, {<<"vb_645:state">>,<<"active">>}, {<<"vb_644:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_644:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_644:checkpoint_extension">>,<<"false">>}, {<<"vb_644:num_items_for_persistence">>,<<"0">>}, {<<"vb_644:num_checkpoints">>,<<"1">>}, {<<"vb_644:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_644:num_checkpoint_items">>,<<"1">>}, {<<"vb_644:num_tap_cursors">>,<<"1">>}, {<<"vb_644:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_644:open_checkpoint_id">>,<<"2">>}, {<<"vb_644:state">>,<<"active">>}, {<<"vb_643:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_643:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_643:checkpoint_extension">>,<<"false">>}, {<<"vb_643:num_items_for_persistence">>,<<"0">>}, {<<"vb_643:num_checkpoints">>,<<"1">>}, {<<"vb_643:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_643:num_checkpoint_items">>,<<"1">>}, {<<"vb_643:num_tap_cursors">>,<<"1">>}, {<<"vb_643:last_closed_checkpoint_id">>,<<"1">>}, 
{<<"vb_643:open_checkpoint_id">>,<<"2">>}, {<<"vb_643:state">>,<<"active">>}, {<<"vb_642:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_642:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_642:checkpoint_extension">>,<<"false">>}, {<<"vb_642:num_items_for_persistence">>,<<"0">>}, {<<"vb_642:num_checkpoints">>,<<"1">>}, {<<"vb_642:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_642:num_checkpoint_items">>,<<"1">>}, {<<"vb_642:num_tap_cursors">>,<<"1">>}, {<<"vb_642:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_642:open_checkpoint_id">>,<<"2">>}, {<<"vb_642:state">>,<<"active">>}, {<<"vb_641:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_641:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_641:checkpoint_extension">>,<<"false">>}, {<<"vb_641:num_items_for_persistence">>,<<"0">>}, {<<"vb_641:num_checkpoints">>,<<"1">>}, {<<"vb_641:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_641:num_checkpoint_items">>,<<"1">>}, {<<"vb_641:num_tap_cursors">>,<<"1">>}, {<<"vb_641:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_641:open_checkpoint_id">>,<<"2">>}, {<<"vb_641:state">>,<<"active">>}, {<<"vb_640:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_640:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_640:checkpoint_extension">>,<<"false">>}, {<<"vb_640:num_items_for_persistence">>,<<"0">>}, {<<"vb_640:num_checkpoints">>,<<"1">>}, {<<"vb_640:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_640:num_checkpoint_items">>,<<"1">>}, {<<"vb_640:num_tap_cursors">>,<<"1">>}, {<<"vb_640:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_640:open_checkpoint_id">>,<<"2">>}, {<<"vb_640:state">>,<<"active">>}, {<<"vb_639:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_639:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_639:checkpoint_extension">>,<<"false">>}, {<<"vb_639:num_items_for_persistence">>,<<"0">>}, {<<"vb_639:num_checkpoints">>,<<"1">>}, {<<"vb_639:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_639:num_checkpoint_items">>,<<"1">>}, {<<"vb_639:num_tap_cursors">>,<<"1">>}, {<<"vb_639:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_639:open_checkpoint_id">>,<<"2">>}, {<<"vb_639:state">>,<<"active">>}, {<<"vb_638:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_638:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_638:checkpoint_extension">>,<<"false">>}, {<<"vb_638:num_items_for_persistence">>,<<"0">>}, {<<"vb_638:num_checkpoints">>,<<"1">>}, {<<"vb_638:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_638:num_checkpoint_items">>,<<"1">>}, {<<"vb_638:num_tap_cursors">>,<<"1">>}, {<<"vb_638:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_638:open_checkpoint_id">>,<<"2">>}, {<<"vb_638:state">>,<<"active">>}, {<<"vb_637:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_637:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_637:checkpoint_extension">>,<<"false">>}, {<<"vb_637:num_items_for_persistence">>,<<"0">>}, {<<"vb_637:num_checkpoints">>,<<"1">>}, {<<"vb_637:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_637:num_checkpoint_items">>,<<"1">>}, {<<"vb_637:num_tap_cursors">>,<<"1">>}, {<<"vb_637:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_637:open_checkpoint_id">>,<<"2">>}, {<<"vb_637:state">>,<<"active">>}, {<<"vb_636:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_636:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_636:checkpoint_extension">>,<<"false">>}, {<<"vb_636:num_items_for_persistence">>,<<"0">>}, 
{<<"vb_636:num_checkpoints">>,<<"1">>}, {<<"vb_636:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_636:num_checkpoint_items">>,<<"1">>}, {<<"vb_636:num_tap_cursors">>,<<"1">>}, {<<"vb_636:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_636:open_checkpoint_id">>,<<"2">>}, {<<"vb_636:state">>,<<"active">>}, {<<"vb_635:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_635:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_635:checkpoint_extension">>,<<"false">>}, {<<"vb_635:num_items_for_persistence">>,<<"0">>}, {<<"vb_635:num_checkpoints">>,<<"1">>}, {<<"vb_635:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_635:num_checkpoint_items">>,<<"1">>}, {<<"vb_635:num_tap_cursors">>,<<"1">>}, {<<"vb_635:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_635:open_checkpoint_id">>,<<"2">>}, {<<"vb_635:state">>,<<"active">>}, {<<"vb_634:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_634:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_634:checkpoint_extension">>,<<"false">>}, {<<"vb_634:num_items_for_persistence">>,<<"0">>}, {<<"vb_634:num_checkpoints">>,<<"1">>}, {<<"vb_634:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_634:num_checkpoint_items">>,<<"1">>}, {<<"vb_634:num_tap_cursors">>,<<"1">>}, {<<"vb_634:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_634:open_checkpoint_id">>,<<"2">>}, {<<"vb_634:state">>,<<"active">>}, {<<"vb_633:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_633:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_633:checkpoint_extension">>,<<"false">>}, {<<"vb_633:num_items_for_persistence">>,<<"0">>}, {<<"vb_633:num_checkpoints">>,<<"1">>}, {<<"vb_633:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_633:num_checkpoint_items">>,<<"1">>}, {<<"vb_633:num_tap_cursors">>,<<"1">>}, {<<"vb_633:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_633:open_checkpoint_id">>,<<"2">>}, {<<"vb_633:state">>,<<"active">>}, {<<"vb_632:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_632:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_632:checkpoint_extension">>,<<"false">>}, {<<"vb_632:num_items_for_persistence">>,<<"0">>}, {<<"vb_632:num_checkpoints">>,<<"1">>}, {<<"vb_632:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_632:num_checkpoint_items">>,<<"1">>}, {<<"vb_632:num_tap_cursors">>,<<"1">>}, {<<"vb_632:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_632:open_checkpoint_id">>,<<"2">>}, {<<"vb_632:state">>,<<"active">>}, {<<"vb_631:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_631:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_631:checkpoint_extension">>,<<"false">>}, {<<"vb_631:num_items_for_persistence">>,<<"0">>}, {<<"vb_631:num_checkpoints">>,<<"1">>}, {<<"vb_631:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_631:num_checkpoint_items">>,<<"1">>}, {<<"vb_631:num_tap_cursors">>,<<"1">>}, {<<"vb_631:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_631:open_checkpoint_id">>,<<"2">>}, {<<"vb_631:state">>,<<"active">>}, {<<"vb_630:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_630:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_630:checkpoint_extension">>,<<"false">>}, {<<"vb_630:num_items_for_persistence">>,<<"0">>}, {<<"vb_630:num_checkpoints">>,<<"1">>}, {<<"vb_630:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_630:num_checkpoint_items">>,<<"1">>}, {<<"vb_630:num_tap_cursors">>,<<"1">>}, {<<"vb_630:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_630:open_checkpoint_id">>,<<"2">>}, {<<"vb_630:state">>,<<"active">>}, 
{<<"vb_629:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_629:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_629:checkpoint_extension">>,<<"false">>}, {<<"vb_629:num_items_for_persistence">>,<<"0">>}, {<<"vb_629:num_checkpoints">>,<<"1">>}, {<<"vb_629:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_629:num_checkpoint_items">>,<<"1">>}, {<<"vb_629:num_tap_cursors">>,<<"1">>}, {<<"vb_629:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_629:open_checkpoint_id">>,<<"2">>}, {<<"vb_629:state">>,<<"active">>}, {<<"vb_628:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_628:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_628:checkpoint_extension">>,<<"false">>}, {<<"vb_628:num_items_for_persistence">>,<<"0">>}, {<<"vb_628:num_checkpoints">>,<<"1">>}, {<<"vb_628:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_628:num_checkpoint_items">>,<<"1">>}, {<<"vb_628:num_tap_cursors">>,<<"1">>}, {<<"vb_628:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_628:open_checkpoint_id">>,<<"2">>}, {<<"vb_628:state">>,<<"active">>}, {<<"vb_627:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_627:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_627:checkpoint_extension">>,<<"false">>}, {<<"vb_627:num_items_for_persistence">>,<<"0">>}, {<<"vb_627:num_checkpoints">>,<<"1">>}, {<<"vb_627:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_627:num_checkpoint_items">>,<<"1">>}, {<<"vb_627:num_tap_cursors">>,<<"1">>}, {<<"vb_627:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_627:open_checkpoint_id">>,<<"2">>}, {<<"vb_627:state">>,<<"active">>}, {<<"vb_626:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_626:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_626:checkpoint_extension">>,<<"false">>}, {<<"vb_626:num_items_for_persistence">>,<<"0">>}, {<<"vb_626:num_checkpoints">>,<<"1">>}, {<<"vb_626:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_626:num_checkpoint_items">>,<<"1">>}, {<<"vb_626:num_tap_cursors">>,<<"1">>}, {<<"vb_626:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_626:open_checkpoint_id">>,<<"2">>}, {<<"vb_626:state">>,<<"active">>}, {<<"vb_625:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_625:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_625:checkpoint_extension">>,<<"false">>}, {<<"vb_625:num_items_for_persistence">>,<<"0">>}, {<<"vb_625:num_checkpoints">>,<<"1">>}, {<<"vb_625:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_625:num_checkpoint_items">>,<<"1">>}, {<<"vb_625:num_tap_cursors">>,<<"1">>}, {<<"vb_625:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_625:open_checkpoint_id">>,<<"2">>}, {<<"vb_625:state">>,<<"active">>}, {<<"vb_624:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_624:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_624:checkpoint_extension">>,<<"false">>}, {<<"vb_624:num_items_for_persistence">>,<<"0">>}, {<<"vb_624:num_checkpoints">>,<<"1">>}, {<<"vb_624:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_624:num_checkpoint_items">>,<<"1">>}, {<<"vb_624:num_tap_cursors">>,<<"1">>}, {<<"vb_624:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_624:open_checkpoint_id">>,<<"2">>}, {<<"vb_624:state">>,<<"active">>}, {<<"vb_623:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_623:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_623:checkpoint_extension">>,<<"false">>}, {<<"vb_623:num_items_for_persistence">>,<<"0">>}, {<<"vb_623:num_checkpoints">>,<<"1">>}, {<<"vb_623:num_open_checkpoint_items">>,<<"0">>}, 
{<<"vb_623:num_checkpoint_items">>,<<"1">>}, {<<"vb_623:num_tap_cursors">>,<<"1">>}, {<<"vb_623:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_623:open_checkpoint_id">>,<<"2">>}, {<<"vb_623:state">>,<<"active">>}, {<<"vb_622:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_622:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_622:checkpoint_extension">>,<<"false">>}, {<<"vb_622:num_items_for_persistence">>,<<"0">>}, {<<"vb_622:num_checkpoints">>,<<"1">>}, {<<"vb_622:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_622:num_checkpoint_items">>,<<"1">>}, {<<"vb_622:num_tap_cursors">>,<<"1">>}, {<<"vb_622:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_622:open_checkpoint_id">>,<<"2">>}, {<<"vb_622:state">>,<<"active">>}, {<<"vb_621:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_621:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_621:checkpoint_extension">>,<<"false">>}, {<<"vb_621:num_items_for_persistence">>,<<"0">>}, {<<"vb_621:num_checkpoints">>,<<"1">>}, {<<"vb_621:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_621:num_checkpoint_items">>,<<"1">>}, {<<"vb_621:num_tap_cursors">>,<<"1">>}, {<<"vb_621:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_621:open_checkpoint_id">>,<<"2">>}, {<<"vb_621:state">>,<<"active">>}, {<<"vb_620:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_620:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_620:checkpoint_extension">>,<<"false">>}, {<<"vb_620:num_items_for_persistence">>,<<"0">>}, {<<"vb_620:num_checkpoints">>,<<"1">>}, {<<"vb_620:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_620:num_checkpoint_items">>,<<"1">>}, {<<"vb_620:num_tap_cursors">>,<<"1">>}, {<<"vb_620:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_620:open_checkpoint_id">>,<<"2">>}, {<<"vb_620:state">>,<<"active">>}, {<<"vb_619:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_619:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_619:checkpoint_extension">>,<<"false">>}, {<<"vb_619:num_items_for_persistence">>,<<"0">>}, {<<"vb_619:num_checkpoints">>,<<"1">>}, {<<"vb_619:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_619:num_checkpoint_items">>,<<"1">>}, {<<"vb_619:num_tap_cursors">>,<<"1">>}, {<<"vb_619:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_619:open_checkpoint_id">>,<<"2">>}, {<<"vb_619:state">>,<<"active">>}, {<<"vb_618:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_618:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_618:checkpoint_extension">>,<<"false">>}, {<<"vb_618:num_items_for_persistence">>,<<"0">>}, {<<"vb_618:num_checkpoints">>,<<"1">>}, {<<"vb_618:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_618:num_checkpoint_items">>,<<"1">>}, {<<"vb_618:num_tap_cursors">>,<<"1">>}, {<<"vb_618:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_618:open_checkpoint_id">>,<<"2">>}, {<<"vb_618:state">>,<<"active">>}, {<<"vb_617:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_617:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_617:checkpoint_extension">>,<<"false">>}, {<<"vb_617:num_items_for_persistence">>,<<"0">>}, {<<"vb_617:num_checkpoints">>,<<"1">>}, {<<"vb_617:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_617:num_checkpoint_items">>,<<"1">>}, {<<"vb_617:num_tap_cursors">>,<<"1">>}, {<<"vb_617:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_617:open_checkpoint_id">>,<<"2">>}, {<<"vb_617:state">>,<<"active">>}, {<<"vb_616:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_616:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, 
<<"2">>}, {<<"vb_616:checkpoint_extension">>,<<"false">>}, {<<"vb_616:num_items_for_persistence">>,<<"0">>}, {<<"vb_616:num_checkpoints">>,<<"1">>}, {<<"vb_616:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_616:num_checkpoint_items">>,<<"1">>}, {<<"vb_616:num_tap_cursors">>,<<"1">>}, {<<"vb_616:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_616:open_checkpoint_id">>,<<"2">>}, {<<"vb_616:state">>,<<"active">>}, {<<"vb_615:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_615:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_615:checkpoint_extension">>,<<"false">>}, {<<"vb_615:num_items_for_persistence">>,<<"0">>}, {<<"vb_615:num_checkpoints">>,<<"1">>}, {<<"vb_615:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_615:num_checkpoint_items">>,<<"1">>}, {<<"vb_615:num_tap_cursors">>,<<"1">>}, {<<"vb_615:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_615:open_checkpoint_id">>,<<"2">>}, {<<"vb_615:state">>,<<"active">>}, {<<"vb_614:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_614:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_614:checkpoint_extension">>,<<"false">>}, {<<"vb_614:num_items_for_persistence">>,<<"0">>}, {<<"vb_614:num_checkpoints">>,<<"1">>}, {<<"vb_614:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_614:num_checkpoint_items">>,<<"1">>}, {<<"vb_614:num_tap_cursors">>,<<"1">>}, {<<"vb_614:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_614:open_checkpoint_id">>,<<"2">>}, {<<"vb_614:state">>,<<"active">>}, {<<"vb_613:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_613:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_613:checkpoint_extension">>,<<"false">>}, {<<"vb_613:num_items_for_persistence">>,<<"0">>}, {<<"vb_613:num_checkpoints">>,<<"1">>}, {<<"vb_613:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_613:num_checkpoint_items">>,<<"1">>}, {<<"vb_613:num_tap_cursors">>,<<"1">>}, {<<"vb_613:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_613:open_checkpoint_id">>,<<"2">>}, {<<"vb_613:state">>,<<"active">>}, {<<"vb_612:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_612:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_612:checkpoint_extension">>,<<"false">>}, {<<"vb_612:num_items_for_persistence">>,<<"0">>}, {<<"vb_612:num_checkpoints">>,<<"1">>}, {<<"vb_612:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_612:num_checkpoint_items">>,<<"1">>}, {<<"vb_612:num_tap_cursors">>,<<"1">>}, {<<"vb_612:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_612:open_checkpoint_id">>,<<"2">>}, {<<"vb_612:state">>,<<"active">>}, {<<"vb_611:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_611:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_611:checkpoint_extension">>,<<"false">>}, {<<"vb_611:num_items_for_persistence">>,<<"0">>}, {<<"vb_611:num_checkpoints">>,<<"1">>}, {<<"vb_611:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_611:num_checkpoint_items">>,<<"1">>}, {<<"vb_611:num_tap_cursors">>,<<"1">>}, {<<"vb_611:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_611:open_checkpoint_id">>,<<"2">>}, {<<"vb_611:state">>,<<"active">>}, {<<"vb_610:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_610:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_610:checkpoint_extension">>,<<"false">>}, {<<"vb_610:num_items_for_persistence">>,<<"0">>}, {<<"vb_610:num_checkpoints">>,<<"1">>}, {<<"vb_610:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_610:num_checkpoint_items">>,<<"1">>}, {<<"vb_610:num_tap_cursors">>,<<"1">>}, {<<"vb_610:last_closed_checkpoint_id">>,<<"1">>}, 
{<<"vb_610:open_checkpoint_id">>,<<"2">>}, {<<"vb_610:state">>,<<"active">>}, {<<"vb_609:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_609:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_609:checkpoint_extension">>,<<"false">>}, {<<"vb_609:num_items_for_persistence">>,<<"0">>}, {<<"vb_609:num_checkpoints">>,<<"1">>}, {<<"vb_609:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_609:num_checkpoint_items">>,<<"1">>}, {<<"vb_609:num_tap_cursors">>,<<"1">>}, {<<"vb_609:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_609:open_checkpoint_id">>,<<"2">>}, {<<"vb_609:state">>,<<"active">>}, {<<"vb_608:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_608:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_608:checkpoint_extension">>,<<"false">>}, {<<"vb_608:num_items_for_persistence">>,<<"0">>}, {<<"vb_608:num_checkpoints">>,<<"1">>}, {<<"vb_608:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_608:num_checkpoint_items">>,<<"1">>}, {<<"vb_608:num_tap_cursors">>,<<"1">>}, {<<"vb_608:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_608:open_checkpoint_id">>,<<"2">>}, {<<"vb_608:state">>,<<"active">>}, {<<"vb_607:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_607:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_607:checkpoint_extension">>,<<"false">>}, {<<"vb_607:num_items_for_persistence">>,<<"0">>}, {<<"vb_607:num_checkpoints">>,<<"1">>}, {<<"vb_607:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_607:num_checkpoint_items">>,<<"1">>}, {<<"vb_607:num_tap_cursors">>,<<"1">>}, {<<"vb_607:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_607:open_checkpoint_id">>,<<"2">>}, {<<"vb_607:state">>,<<"active">>}, {<<"vb_606:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_606:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_606:checkpoint_extension">>,<<"false">>}, {<<"vb_606:num_items_for_persistence">>,<<"0">>}, {<<"vb_606:num_checkpoints">>,<<"1">>}, {<<"vb_606:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_606:num_checkpoint_items">>,<<"1">>}, {<<"vb_606:num_tap_cursors">>,<<"1">>}, {<<"vb_606:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_606:open_checkpoint_id">>,<<"2">>}, {<<"vb_606:state">>,<<"active">>}, {<<"vb_605:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_605:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_605:checkpoint_extension">>,<<"false">>}, {<<"vb_605:num_items_for_persistence">>,<<"0">>}, {<<"vb_605:num_checkpoints">>,<<"1">>}, {<<"vb_605:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_605:num_checkpoint_items">>,<<"1">>}, {<<"vb_605:num_tap_cursors">>,<<"1">>}, {<<"vb_605:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_605:open_checkpoint_id">>,<<"2">>}, {<<"vb_605:state">>,<<"active">>}, {<<"vb_604:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_604:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_604:checkpoint_extension">>,<<"false">>}, {<<"vb_604:num_items_for_persistence">>,<<"0">>}, {<<"vb_604:num_checkpoints">>,<<"1">>}, {<<"vb_604:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_604:num_checkpoint_items">>,<<"1">>}, {<<"vb_604:num_tap_cursors">>,<<"1">>}, {<<"vb_604:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_604:open_checkpoint_id">>,<<"2">>}, {<<"vb_604:state">>,<<"active">>}, {<<"vb_603:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_603:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_603:checkpoint_extension">>,<<"false">>}, {<<"vb_603:num_items_for_persistence">>,<<"0">>}, 
{<<"vb_603:num_checkpoints">>,<<"1">>}, {<<"vb_603:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_603:num_checkpoint_items">>,<<"1">>}, {<<"vb_603:num_tap_cursors">>,<<"1">>}, {<<"vb_603:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_603:open_checkpoint_id">>,<<"2">>}, {<<"vb_603:state">>,<<"active">>}, {<<"vb_602:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_602:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_602:checkpoint_extension">>,<<"false">>}, {<<"vb_602:num_items_for_persistence">>,<<"0">>}, {<<"vb_602:num_checkpoints">>,<<"1">>}, {<<"vb_602:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_602:num_checkpoint_items">>,<<"1">>}, {<<"vb_602:num_tap_cursors">>,<<"1">>}, {<<"vb_602:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_602:open_checkpoint_id">>,<<"2">>}, {<<"vb_602:state">>,<<"active">>}, {<<"vb_601:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_601:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_601:checkpoint_extension">>,<<"false">>}, {<<"vb_601:num_items_for_persistence">>,<<"0">>}, {<<"vb_601:num_checkpoints">>,<<"1">>}, {<<"vb_601:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_601:num_checkpoint_items">>,<<"1">>}, {<<"vb_601:num_tap_cursors">>,<<"1">>}, {<<"vb_601:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_601:open_checkpoint_id">>,<<"2">>}, {<<"vb_601:state">>,<<"active">>}, {<<"vb_600:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_600:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_600:checkpoint_extension">>,<<"false">>}, {<<"vb_600:num_items_for_persistence">>,<<"0">>}, {<<"vb_600:num_checkpoints">>,<<"1">>}, {<<"vb_600:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_600:num_checkpoint_items">>,<<"1">>}, {<<"vb_600:num_tap_cursors">>,<<"1">>}, {<<"vb_600:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_600:open_checkpoint_id">>,<<"2">>}, {<<"vb_600:state">>,<<"active">>}, {<<"vb_599:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_599:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_599:checkpoint_extension">>,<<"false">>}, {<<"vb_599:num_items_for_persistence">>,<<"0">>}, {<<"vb_599:num_checkpoints">>,<<"1">>}, {<<"vb_599:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_599:num_checkpoint_items">>,<<"1">>}, {<<"vb_599:num_tap_cursors">>,<<"1">>}, {<<"vb_599:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_599:open_checkpoint_id">>,<<"2">>}, {<<"vb_599:state">>,<<"active">>}, {<<"vb_598:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_598:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_598:checkpoint_extension">>,<<"false">>}, {<<"vb_598:num_items_for_persistence">>,<<"0">>}, {<<"vb_598:num_checkpoints">>,<<"1">>}, {<<"vb_598:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_598:num_checkpoint_items">>,<<"1">>}, {<<"vb_598:num_tap_cursors">>,<<"1">>}, {<<"vb_598:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_598:open_checkpoint_id">>,<<"2">>}, {<<"vb_598:state">>,<<"active">>}, {<<"vb_597:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_597:eq_tapq:replication_ns_1@10.242.238.89:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_597:checkpoint_extension">>,<<"false">>}, {<<"vb_597:num_items_for_persistence">>,<<"0">>}, {<<"vb_597:num_checkpoints">>,<<"1">>}, {<<"vb_597:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_597:num_checkpoint_items">>,<<"1">>}, {<<"vb_597:num_tap_cursors">>,<<"1">>}, {<<"vb_597:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_597:open_checkpoint_id">>,<<"2">>}, {<<"vb_597:state">>,<<"active">>}, 
{<<"vb_596:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_596:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_596:checkpoint_extension">>,<<"false">>}, {<<"vb_596:num_items_for_persistence">>,<<"0">>}, {<<"vb_596:num_checkpoints">>,<<"1">>}, {<<"vb_596:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_596:num_checkpoint_items">>,<<"1">>}, {<<"vb_596:num_tap_cursors">>,<<"1">>}, {<<"vb_596:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_596:open_checkpoint_id">>,<<"2">>}, {<<"vb_596:state">>,<<"active">>}, {<<"vb_595:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_595:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_595:checkpoint_extension">>,<<"false">>}, {<<"vb_595:num_items_for_persistence">>,<<"0">>}, {<<"vb_595:num_checkpoints">>,<<"1">>}, {<<"vb_595:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_595:num_checkpoint_items">>,<<"1">>}, {<<"vb_595:num_tap_cursors">>,<<"1">>}, {<<"vb_595:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_595:open_checkpoint_id">>,<<"2">>}, {<<"vb_595:state">>,<<"active">>}, {<<"vb_594:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_594:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_594:checkpoint_extension">>,<<"false">>}, {<<"vb_594:num_items_for_persistence">>,<<"0">>}, {<<"vb_594:num_checkpoints">>,<<"1">>}, {<<"vb_594:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_594:num_checkpoint_items">>,<<"1">>}, {<<"vb_594:num_tap_cursors">>,<<"1">>}, {<<"vb_594:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_594:open_checkpoint_id">>,<<"2">>}, {<<"vb_594:state">>,<<"active">>}, {<<"vb_593:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_593:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_593:checkpoint_extension">>,<<"false">>}, {<<"vb_593:num_items_for_persistence">>,<<"0">>}, {<<"vb_593:num_checkpoints">>,<<"1">>}, {<<"vb_593:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_593:num_checkpoint_items">>,<<"1">>}, {<<"vb_593:num_tap_cursors">>,<<"1">>}, {<<"vb_593:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_593:open_checkpoint_id">>,<<"2">>}, {<<"vb_593:state">>,<<"active">>}, {<<"vb_592:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_592:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_592:checkpoint_extension">>,<<"false">>}, {<<"vb_592:num_items_for_persistence">>,<<"0">>}, {<<"vb_592:num_checkpoints">>,<<"1">>}, {<<"vb_592:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_592:num_checkpoint_items">>,<<"1">>}, {<<"vb_592:num_tap_cursors">>,<<"1">>}, {<<"vb_592:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_592:open_checkpoint_id">>,<<"2">>}, {<<"vb_592:state">>,<<"active">>}, {<<"vb_591:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_591:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_591:checkpoint_extension">>,<<"false">>}, {<<"vb_591:num_items_for_persistence">>,<<"0">>}, {<<"vb_591:num_checkpoints">>,<<"1">>}, {<<"vb_591:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_591:num_checkpoint_items">>,<<"1">>}, {<<"vb_591:num_tap_cursors">>,<<"1">>}, {<<"vb_591:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_591:open_checkpoint_id">>,<<"2">>}, {<<"vb_591:state">>,<<"active">>}, {<<"vb_590:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_590:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_590:checkpoint_extension">>,<<"false">>}, {<<"vb_590:num_items_for_persistence">>,<<"0">>}, {<<"vb_590:num_checkpoints">>,<<"1">>}, {<<"vb_590:num_open_checkpoint_items">>,<<"0">>}, 
{<<"vb_590:num_checkpoint_items">>,<<"1">>}, {<<"vb_590:num_tap_cursors">>,<<"1">>}, {<<"vb_590:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_590:open_checkpoint_id">>,<<"2">>}, {<<"vb_590:state">>,<<"active">>}, {<<"vb_589:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_589:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_589:checkpoint_extension">>,<<"false">>}, {<<"vb_589:num_items_for_persistence">>,<<"0">>}, {<<"vb_589:num_checkpoints">>,<<"1">>}, {<<"vb_589:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_589:num_checkpoint_items">>,<<"1">>}, {<<"vb_589:num_tap_cursors">>,<<"1">>}, {<<"vb_589:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_589:open_checkpoint_id">>,<<"2">>}, {<<"vb_589:state">>,<<"active">>}, {<<"vb_588:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_588:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_588:checkpoint_extension">>,<<"false">>}, {<<"vb_588:num_items_for_persistence">>,<<"0">>}, {<<"vb_588:num_checkpoints">>,<<"1">>}, {<<"vb_588:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_588:num_checkpoint_items">>,<<"1">>}, {<<"vb_588:num_tap_cursors">>,<<"1">>}, {<<"vb_588:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_588:open_checkpoint_id">>,<<"2">>}, {<<"vb_588:state">>,<<"active">>}, {<<"vb_587:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_587:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_587:checkpoint_extension">>,<<"false">>}, {<<"vb_587:num_items_for_persistence">>,<<"0">>}, {<<"vb_587:num_checkpoints">>,<<"1">>}, {<<"vb_587:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_587:num_checkpoint_items">>,<<"1">>}, {<<"vb_587:num_tap_cursors">>,<<"1">>}, {<<"vb_587:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_587:open_checkpoint_id">>,<<"2">>}, {<<"vb_587:state">>,<<"active">>}, {<<"vb_586:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_586:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_586:checkpoint_extension">>,<<"false">>}, {<<"vb_586:num_items_for_persistence">>,<<"0">>}, {<<"vb_586:num_checkpoints">>,<<"1">>}, {<<"vb_586:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_586:num_checkpoint_items">>,<<"1">>}, {<<"vb_586:num_tap_cursors">>,<<"1">>}, {<<"vb_586:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_586:open_checkpoint_id">>,<<"2">>}, {<<"vb_586:state">>,<<"active">>}, {<<"vb_585:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_585:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_585:checkpoint_extension">>,<<"false">>}, {<<"vb_585:num_items_for_persistence">>,<<"0">>}, {<<"vb_585:num_checkpoints">>,<<"1">>}, {<<"vb_585:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_585:num_checkpoint_items">>,<<"1">>}, {<<"vb_585:num_tap_cursors">>,<<"1">>}, {<<"vb_585:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_585:open_checkpoint_id">>,<<"2">>}, {<<"vb_585:state">>,<<"active">>}, {<<"vb_584:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_584:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_584:checkpoint_extension">>,<<"false">>}, {<<"vb_584:num_items_for_persistence">>,<<"0">>}, {<<"vb_584:num_checkpoints">>,<<"1">>}, {<<"vb_584:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_584:num_checkpoint_items">>,<<"1">>}, {<<"vb_584:num_tap_cursors">>,<<"1">>}, {<<"vb_584:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_584:open_checkpoint_id">>,<<"2">>}, {<<"vb_584:state">>,<<"active">>}, {<<"vb_583:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_583:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, 
<<"2">>}, {<<"vb_583:checkpoint_extension">>,<<"false">>}, {<<"vb_583:num_items_for_persistence">>,<<"0">>}, {<<"vb_583:num_checkpoints">>,<<"1">>}, {<<"vb_583:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_583:num_checkpoint_items">>,<<"1">>}, {<<"vb_583:num_tap_cursors">>,<<"1">>}, {<<"vb_583:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_583:open_checkpoint_id">>,<<"2">>}, {<<"vb_583:state">>,<<"active">>}, {<<"vb_582:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_582:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_582:checkpoint_extension">>,<<"false">>}, {<<"vb_582:num_items_for_persistence">>,<<"0">>}, {<<"vb_582:num_checkpoints">>,<<"1">>}, {<<"vb_582:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_582:num_checkpoint_items">>,<<"1">>}, {<<"vb_582:num_tap_cursors">>,<<"1">>}, {<<"vb_582:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_582:open_checkpoint_id">>,<<"2">>}, {<<"vb_582:state">>,<<"active">>}, {<<"vb_581:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_581:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_581:checkpoint_extension">>,<<"false">>}, {<<"vb_581:num_items_for_persistence">>,<<"0">>}, {<<"vb_581:num_checkpoints">>,<<"1">>}, {<<"vb_581:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_581:num_checkpoint_items">>,<<"1">>}, {<<"vb_581:num_tap_cursors">>,<<"1">>}, {<<"vb_581:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_581:open_checkpoint_id">>,<<"2">>}, {<<"vb_581:state">>,<<"active">>}, {<<"vb_580:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_580:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_580:checkpoint_extension">>,<<"false">>}, {<<"vb_580:num_items_for_persistence">>,<<"0">>}, {<<"vb_580:num_checkpoints">>,<<"1">>}, {<<"vb_580:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_580:num_checkpoint_items">>,<<"1">>}, {<<"vb_580:num_tap_cursors">>,<<"1">>}, {<<"vb_580:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_580:open_checkpoint_id">>,<<"2">>}, {<<"vb_580:state">>,<<"active">>}, {<<"vb_579:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_579:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_579:checkpoint_extension">>,<<"false">>}, {<<"vb_579:num_items_for_persistence">>,<<"0">>}, {<<"vb_579:num_checkpoints">>,<<"1">>}, {<<"vb_579:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_579:num_checkpoint_items">>,<<"1">>}, {<<"vb_579:num_tap_cursors">>,<<"1">>}, {<<"vb_579:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_579:open_checkpoint_id">>,<<"2">>}, {<<"vb_579:state">>,<<"active">>}, {<<"vb_578:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_578:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_578:checkpoint_extension">>,<<"false">>}, {<<"vb_578:num_items_for_persistence">>,<<"0">>}, {<<"vb_578:num_checkpoints">>,<<"1">>}, {<<"vb_578:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_578:num_checkpoint_items">>,<<"1">>}, {<<"vb_578:num_tap_cursors">>,<<"1">>}, {<<"vb_578:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_578:open_checkpoint_id">>,<<"2">>}, {<<"vb_578:state">>,<<"active">>}, {<<"vb_577:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_577:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_577:checkpoint_extension">>,<<"false">>}, {<<"vb_577:num_items_for_persistence">>,<<"0">>}, {<<"vb_577:num_checkpoints">>,<<"1">>}, {<<"vb_577:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_577:num_checkpoint_items">>,<<"1">>}, {<<"vb_577:num_tap_cursors">>,<<"1">>}, {<<"vb_577:last_closed_checkpoint_id">>,<<"1">>}, 
{<<"vb_577:open_checkpoint_id">>,<<"2">>}, {<<"vb_577:state">>,<<"active">>}, {<<"vb_576:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_576:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_576:checkpoint_extension">>,<<"false">>}, {<<"vb_576:num_items_for_persistence">>,<<"0">>}, {<<"vb_576:num_checkpoints">>,<<"1">>}, {<<"vb_576:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_576:num_checkpoint_items">>,<<"1">>}, {<<"vb_576:num_tap_cursors">>,<<"1">>}, {<<"vb_576:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_576:open_checkpoint_id">>,<<"2">>}, {<<"vb_576:state">>,<<"active">>}, {<<"vb_575:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_575:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_575:checkpoint_extension">>,<<"false">>}, {<<"vb_575:num_items_for_persistence">>,<<"0">>}, {<<"vb_575:num_checkpoints">>,<<"1">>}, {<<"vb_575:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_575:num_checkpoint_items">>,<<"1">>}, {<<"vb_575:num_tap_cursors">>,<<"1">>}, {<<"vb_575:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_575:open_checkpoint_id">>,<<"2">>}, {<<"vb_575:state">>,<<"active">>}, {<<"vb_574:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_574:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_574:checkpoint_extension">>,<<"false">>}, {<<"vb_574:num_items_for_persistence">>,<<"0">>}, {<<"vb_574:num_checkpoints">>,<<"1">>}, {<<"vb_574:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_574:num_checkpoint_items">>,<<"1">>}, {<<"vb_574:num_tap_cursors">>,<<"1">>}, {<<"vb_574:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_574:open_checkpoint_id">>,<<"2">>}, {<<"vb_574:state">>,<<"active">>}, {<<"vb_573:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_573:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_573:checkpoint_extension">>,<<"false">>}, {<<"vb_573:num_items_for_persistence">>,<<"0">>}, {<<"vb_573:num_checkpoints">>,<<"1">>}, {<<"vb_573:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_573:num_checkpoint_items">>,<<"1">>}, {<<"vb_573:num_tap_cursors">>,<<"1">>}, {<<"vb_573:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_573:open_checkpoint_id">>,<<"2">>}, {<<"vb_573:state">>,<<"active">>}, {<<"vb_572:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_572:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_572:checkpoint_extension">>,<<"false">>}, {<<"vb_572:num_items_for_persistence">>,<<"0">>}, {<<"vb_572:num_checkpoints">>,<<"1">>}, {<<"vb_572:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_572:num_checkpoint_items">>,<<"1">>}, {<<"vb_572:num_tap_cursors">>,<<"1">>}, {<<"vb_572:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_572:open_checkpoint_id">>,<<"2">>}, {<<"vb_572:state">>,<<"active">>}, {<<"vb_571:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_571:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_571:checkpoint_extension">>,<<"false">>}, {<<"vb_571:num_items_for_persistence">>,<<"0">>}, {<<"vb_571:num_checkpoints">>,<<"1">>}, {<<"vb_571:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_571:num_checkpoint_items">>,<<"1">>}, {<<"vb_571:num_tap_cursors">>,<<"1">>}, {<<"vb_571:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_571:open_checkpoint_id">>,<<"2">>}, {<<"vb_571:state">>,<<"active">>}, {<<"vb_570:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_570:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_570:checkpoint_extension">>,<<"false">>}, {<<"vb_570:num_items_for_persistence">>,<<"0">>}, 
{<<"vb_570:num_checkpoints">>,<<"1">>}, {<<"vb_570:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_570:num_checkpoint_items">>,<<"1">>}, {<<"vb_570:num_tap_cursors">>,<<"1">>}, {<<"vb_570:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_570:open_checkpoint_id">>,<<"2">>}, {<<"vb_570:state">>,<<"active">>}, {<<"vb_569:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_569:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_569:checkpoint_extension">>,<<"false">>}, {<<"vb_569:num_items_for_persistence">>,<<"0">>}, {<<"vb_569:num_checkpoints">>,<<"1">>}, {<<"vb_569:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_569:num_checkpoint_items">>,<<"1">>}, {<<"vb_569:num_tap_cursors">>,<<"1">>}, {<<"vb_569:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_569:open_checkpoint_id">>,<<"2">>}, {<<"vb_569:state">>,<<"active">>}, {<<"vb_568:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_568:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_568:checkpoint_extension">>,<<"false">>}, {<<"vb_568:num_items_for_persistence">>,<<"0">>}, {<<"vb_568:num_checkpoints">>,<<"1">>}, {<<"vb_568:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_568:num_checkpoint_items">>,<<"1">>}, {<<"vb_568:num_tap_cursors">>,<<"1">>}, {<<"vb_568:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_568:open_checkpoint_id">>,<<"2">>}, {<<"vb_568:state">>,<<"active">>}, {<<"vb_567:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_567:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_567:checkpoint_extension">>,<<"false">>}, {<<"vb_567:num_items_for_persistence">>,<<"0">>}, {<<"vb_567:num_checkpoints">>,<<"1">>}, {<<"vb_567:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_567:num_checkpoint_items">>,<<"1">>}, {<<"vb_567:num_tap_cursors">>,<<"1">>}, {<<"vb_567:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_567:open_checkpoint_id">>,<<"2">>}, {<<"vb_567:state">>,<<"active">>}, {<<"vb_566:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_566:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_566:checkpoint_extension">>,<<"false">>}, {<<"vb_566:num_items_for_persistence">>,<<"0">>}, {<<"vb_566:num_checkpoints">>,<<"1">>}, {<<"vb_566:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_566:num_checkpoint_items">>,<<"1">>}, {<<"vb_566:num_tap_cursors">>,<<"1">>}, {<<"vb_566:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_566:open_checkpoint_id">>,<<"2">>}, {<<"vb_566:state">>,<<"active">>}, {<<"vb_565:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_565:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_565:checkpoint_extension">>,<<"false">>}, {<<"vb_565:num_items_for_persistence">>,<<"0">>}, {<<"vb_565:num_checkpoints">>,<<"1">>}, {<<"vb_565:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_565:num_checkpoint_items">>,<<"1">>}, {<<"vb_565:num_tap_cursors">>,<<"1">>}, {<<"vb_565:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_565:open_checkpoint_id">>,<<"2">>}, {<<"vb_565:state">>,<<"active">>}, {<<"vb_564:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_564:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_564:checkpoint_extension">>,<<"false">>}, {<<"vb_564:num_items_for_persistence">>,<<"0">>}, {<<"vb_564:num_checkpoints">>,<<"1">>}, {<<"vb_564:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_564:num_checkpoint_items">>,<<"1">>}, {<<"vb_564:num_tap_cursors">>,<<"1">>}, {<<"vb_564:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_564:open_checkpoint_id">>,<<"2">>}, {<<"vb_564:state">>,<<"active">>}, 
{<<"vb_563:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_563:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_563:checkpoint_extension">>,<<"false">>}, {<<"vb_563:num_items_for_persistence">>,<<"0">>}, {<<"vb_563:num_checkpoints">>,<<"1">>}, {<<"vb_563:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_563:num_checkpoint_items">>,<<"1">>}, {<<"vb_563:num_tap_cursors">>,<<"1">>}, {<<"vb_563:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_563:open_checkpoint_id">>,<<"2">>}, {<<"vb_563:state">>,<<"active">>}, {<<"vb_562:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_562:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_562:checkpoint_extension">>,<<"false">>}, {<<"vb_562:num_items_for_persistence">>,<<"0">>}, {<<"vb_562:num_checkpoints">>,<<"1">>}, {<<"vb_562:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_562:num_checkpoint_items">>,<<"1">>}, {<<"vb_562:num_tap_cursors">>,<<"1">>}, {<<"vb_562:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_562:open_checkpoint_id">>,<<"2">>}, {<<"vb_562:state">>,<<"active">>}, {<<"vb_561:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_561:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_561:checkpoint_extension">>,<<"false">>}, {<<"vb_561:num_items_for_persistence">>,<<"0">>}, {<<"vb_561:num_checkpoints">>,<<"1">>}, {<<"vb_561:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_561:num_checkpoint_items">>,<<"1">>}, {<<"vb_561:num_tap_cursors">>,<<"1">>}, {<<"vb_561:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_561:open_checkpoint_id">>,<<"2">>}, {<<"vb_561:state">>,<<"active">>}, {<<"vb_560:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_560:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_560:checkpoint_extension">>,<<"false">>}, {<<"vb_560:num_items_for_persistence">>,<<"0">>}, {<<"vb_560:num_checkpoints">>,<<"1">>}, {<<"vb_560:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_560:num_checkpoint_items">>,<<"1">>}, {<<"vb_560:num_tap_cursors">>,<<"1">>}, {<<"vb_560:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_560:open_checkpoint_id">>,<<"2">>}, {<<"vb_560:state">>,<<"active">>}, {<<"vb_559:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_559:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_559:checkpoint_extension">>,<<"false">>}, {<<"vb_559:num_items_for_persistence">>,<<"0">>}, {<<"vb_559:num_checkpoints">>,<<"1">>}, {<<"vb_559:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_559:num_checkpoint_items">>,<<"1">>}, {<<"vb_559:num_tap_cursors">>,<<"1">>}, {<<"vb_559:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_559:open_checkpoint_id">>,<<"2">>}, {<<"vb_559:state">>,<<"active">>}, {<<"vb_558:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_558:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_558:checkpoint_extension">>,<<"false">>}, {<<"vb_558:num_items_for_persistence">>,<<"0">>}, {<<"vb_558:num_checkpoints">>,<<"1">>}, {<<"vb_558:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_558:num_checkpoint_items">>,<<"1">>}, {<<"vb_558:num_tap_cursors">>,<<"1">>}, {<<"vb_558:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_558:open_checkpoint_id">>,<<"2">>}, {<<"vb_558:state">>,<<"active">>}, {<<"vb_557:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_557:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_557:checkpoint_extension">>,<<"false">>}, {<<"vb_557:num_items_for_persistence">>,<<"0">>}, {<<"vb_557:num_checkpoints">>,<<"1">>}, {<<"vb_557:num_open_checkpoint_items">>,<<"0">>}, 
{<<"vb_557:num_checkpoint_items">>,<<"1">>}, {<<"vb_557:num_tap_cursors">>,<<"1">>}, {<<"vb_557:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_557:open_checkpoint_id">>,<<"2">>}, {<<"vb_557:state">>,<<"active">>}, {<<"vb_556:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_556:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_556:checkpoint_extension">>,<<"false">>}, {<<"vb_556:num_items_for_persistence">>,<<"0">>}, {<<"vb_556:num_checkpoints">>,<<"1">>}, {<<"vb_556:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_556:num_checkpoint_items">>,<<"1">>}, {<<"vb_556:num_tap_cursors">>,<<"1">>}, {<<"vb_556:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_556:open_checkpoint_id">>,<<"2">>}, {<<"vb_556:state">>,<<"active">>}, {<<"vb_555:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_555:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_555:checkpoint_extension">>,<<"false">>}, {<<"vb_555:num_items_for_persistence">>,<<"0">>}, {<<"vb_555:num_checkpoints">>,<<"1">>}, {<<"vb_555:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_555:num_checkpoint_items">>,<<"1">>}, {<<"vb_555:num_tap_cursors">>,<<"1">>}, {<<"vb_555:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_555:open_checkpoint_id">>,<<"2">>}, {<<"vb_555:state">>,<<"active">>}, {<<"vb_554:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_554:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_554:checkpoint_extension">>,<<"false">>}, {<<"vb_554:num_items_for_persistence">>,<<"0">>}, {<<"vb_554:num_checkpoints">>,<<"1">>}, {<<"vb_554:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_554:num_checkpoint_items">>,<<"1">>}, {<<"vb_554:num_tap_cursors">>,<<"1">>}, {<<"vb_554:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_554:open_checkpoint_id">>,<<"2">>}, {<<"vb_554:state">>,<<"active">>}, {<<"vb_553:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_553:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_553:checkpoint_extension">>,<<"false">>}, {<<"vb_553:num_items_for_persistence">>,<<"0">>}, {<<"vb_553:num_checkpoints">>,<<"1">>}, {<<"vb_553:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_553:num_checkpoint_items">>,<<"1">>}, {<<"vb_553:num_tap_cursors">>,<<"1">>}, {<<"vb_553:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_553:open_checkpoint_id">>,<<"2">>}, {<<"vb_553:state">>,<<"active">>}, {<<"vb_552:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_552:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_552:checkpoint_extension">>,<<"false">>}, {<<"vb_552:num_items_for_persistence">>,<<"0">>}, {<<"vb_552:num_checkpoints">>,<<"1">>}, {<<"vb_552:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_552:num_checkpoint_items">>,<<"1">>}, {<<"vb_552:num_tap_cursors">>,<<"1">>}, {<<"vb_552:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_552:open_checkpoint_id">>,<<"2">>}, {<<"vb_552:state">>,<<"active">>}, {<<"vb_551:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_551:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_551:checkpoint_extension">>,<<"false">>}, {<<"vb_551:num_items_for_persistence">>,<<"0">>}, {<<"vb_551:num_checkpoints">>,<<"1">>}, {<<"vb_551:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_551:num_checkpoint_items">>,<<"1">>}, {<<"vb_551:num_tap_cursors">>,<<"1">>}, {<<"vb_551:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_551:open_checkpoint_id">>,<<"2">>}, {<<"vb_551:state">>,<<"active">>}, {<<"vb_550:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_550:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, 
<<"2">>}, {<<"vb_550:checkpoint_extension">>,<<"false">>}, {<<"vb_550:num_items_for_persistence">>,<<"0">>}, {<<"vb_550:num_checkpoints">>,<<"1">>}, {<<"vb_550:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_550:num_checkpoint_items">>,<<"1">>}, {<<"vb_550:num_tap_cursors">>,<<"1">>}, {<<"vb_550:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_550:open_checkpoint_id">>,<<"2">>}, {<<"vb_550:state">>,<<"active">>}, {<<"vb_549:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_549:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_549:checkpoint_extension">>,<<"false">>}, {<<"vb_549:num_items_for_persistence">>,<<"0">>}, {<<"vb_549:num_checkpoints">>,<<"1">>}, {<<"vb_549:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_549:num_checkpoint_items">>,<<"1">>}, {<<"vb_549:num_tap_cursors">>,<<"1">>}, {<<"vb_549:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_549:open_checkpoint_id">>,<<"2">>}, {<<"vb_549:state">>,<<"active">>}, {<<"vb_548:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_548:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_548:checkpoint_extension">>,<<"false">>}, {<<"vb_548:num_items_for_persistence">>,<<"0">>}, {<<"vb_548:num_checkpoints">>,<<"1">>}, {<<"vb_548:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_548:num_checkpoint_items">>,<<"1">>}, {<<"vb_548:num_tap_cursors">>,<<"1">>}, {<<"vb_548:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_548:open_checkpoint_id">>,<<"2">>}, {<<"vb_548:state">>,<<"active">>}, {<<"vb_547:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_547:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_547:checkpoint_extension">>,<<"false">>}, {<<"vb_547:num_items_for_persistence">>,<<"0">>}, {<<"vb_547:num_checkpoints">>,<<"1">>}, {<<"vb_547:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_547:num_checkpoint_items">>,<<"1">>}, {<<"vb_547:num_tap_cursors">>,<<"1">>}, {<<"vb_547:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_547:open_checkpoint_id">>,<<"2">>}, {<<"vb_547:state">>,<<"active">>}, {<<"vb_546:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_546:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_546:checkpoint_extension">>,<<"false">>}, {<<"vb_546:num_items_for_persistence">>,<<"0">>}, {<<"vb_546:num_checkpoints">>,<<"1">>}, {<<"vb_546:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_546:num_checkpoint_items">>,<<"1">>}, {<<"vb_546:num_tap_cursors">>,<<"1">>}, {<<"vb_546:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_546:open_checkpoint_id">>,<<"2">>}, {<<"vb_546:state">>,<<"active">>}, {<<"vb_545:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_545:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_545:checkpoint_extension">>,<<"false">>}, {<<"vb_545:num_items_for_persistence">>,<<"0">>}, {<<"vb_545:num_checkpoints">>,<<"1">>}, {<<"vb_545:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_545:num_checkpoint_items">>,<<"1">>}, {<<"vb_545:num_tap_cursors">>,<<"1">>}, {<<"vb_545:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_545:open_checkpoint_id">>,<<"2">>}, {<<"vb_545:state">>,<<"active">>}, {<<"vb_544:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_544:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_544:checkpoint_extension">>,<<"false">>}, {<<"vb_544:num_items_for_persistence">>,<<"0">>}, {<<"vb_544:num_checkpoints">>,<<"1">>}, {<<"vb_544:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_544:num_checkpoint_items">>,<<"1">>}, {<<"vb_544:num_tap_cursors">>,<<"1">>}, {<<"vb_544:last_closed_checkpoint_id">>,<<"1">>}, 
{<<"vb_544:open_checkpoint_id">>,<<"2">>}, {<<"vb_544:state">>,<<"active">>}, {<<"vb_543:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_543:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_543:checkpoint_extension">>,<<"false">>}, {<<"vb_543:num_items_for_persistence">>,<<"0">>}, {<<"vb_543:num_checkpoints">>,<<"1">>}, {<<"vb_543:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_543:num_checkpoint_items">>,<<"1">>}, {<<"vb_543:num_tap_cursors">>,<<"1">>}, {<<"vb_543:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_543:open_checkpoint_id">>,<<"2">>}, {<<"vb_543:state">>,<<"active">>}, {<<"vb_542:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_542:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_542:checkpoint_extension">>,<<"false">>}, {<<"vb_542:num_items_for_persistence">>,<<"0">>}, {<<"vb_542:num_checkpoints">>,<<"1">>}, {<<"vb_542:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_542:num_checkpoint_items">>,<<"1">>}, {<<"vb_542:num_tap_cursors">>,<<"1">>}, {<<"vb_542:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_542:open_checkpoint_id">>,<<"2">>}, {<<"vb_542:state">>,<<"active">>}, {<<"vb_541:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_541:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_541:checkpoint_extension">>,<<"false">>}, {<<"vb_541:num_items_for_persistence">>,<<"0">>}, {<<"vb_541:num_checkpoints">>,<<"1">>}, {<<"vb_541:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_541:num_checkpoint_items">>,<<"1">>}, {<<"vb_541:num_tap_cursors">>,<<"1">>}, {<<"vb_541:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_541:open_checkpoint_id">>,<<"2">>}, {<<"vb_541:state">>,<<"active">>}, {<<"vb_540:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_540:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_540:checkpoint_extension">>,<<"false">>}, {<<"vb_540:num_items_for_persistence">>,<<"0">>}, {<<"vb_540:num_checkpoints">>,<<"1">>}, {<<"vb_540:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_540:num_checkpoint_items">>,<<"1">>}, {<<"vb_540:num_tap_cursors">>,<<"1">>}, {<<"vb_540:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_540:open_checkpoint_id">>,<<"2">>}, {<<"vb_540:state">>,<<"active">>}, {<<"vb_539:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_539:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_539:checkpoint_extension">>,<<"false">>}, {<<"vb_539:num_items_for_persistence">>,<<"0">>}, {<<"vb_539:num_checkpoints">>,<<"1">>}, {<<"vb_539:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_539:num_checkpoint_items">>,<<"1">>}, {<<"vb_539:num_tap_cursors">>,<<"1">>}, {<<"vb_539:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_539:open_checkpoint_id">>,<<"2">>}, {<<"vb_539:state">>,<<"active">>}, {<<"vb_538:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_538:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_538:checkpoint_extension">>,<<"false">>}, {<<"vb_538:num_items_for_persistence">>,<<"0">>}, {<<"vb_538:num_checkpoints">>,<<"1">>}, {<<"vb_538:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_538:num_checkpoint_items">>,<<"1">>}, {<<"vb_538:num_tap_cursors">>,<<"1">>}, {<<"vb_538:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_538:open_checkpoint_id">>,<<"2">>}, {<<"vb_538:state">>,<<"active">>}, {<<"vb_537:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_537:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_537:checkpoint_extension">>,<<"false">>}, {<<"vb_537:num_items_for_persistence">>,<<"0">>}, 
{<<"vb_537:num_checkpoints">>,<<"1">>}, {<<"vb_537:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_537:num_checkpoint_items">>,<<"1">>}, {<<"vb_537:num_tap_cursors">>,<<"1">>}, {<<"vb_537:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_537:open_checkpoint_id">>,<<"2">>}, {<<"vb_537:state">>,<<"active">>}, {<<"vb_536:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_536:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_536:checkpoint_extension">>,<<"false">>}, {<<"vb_536:num_items_for_persistence">>,<<"0">>}, {<<"vb_536:num_checkpoints">>,<<"1">>}, {<<"vb_536:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_536:num_checkpoint_items">>,<<"1">>}, {<<"vb_536:num_tap_cursors">>,<<"1">>}, {<<"vb_536:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_536:open_checkpoint_id">>,<<"2">>}, {<<"vb_536:state">>,<<"active">>}, {<<"vb_535:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_535:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_535:checkpoint_extension">>,<<"false">>}, {<<"vb_535:num_items_for_persistence">>,<<"0">>}, {<<"vb_535:num_checkpoints">>,<<"1">>}, {<<"vb_535:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_535:num_checkpoint_items">>,<<"1">>}, {<<"vb_535:num_tap_cursors">>,<<"1">>}, {<<"vb_535:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_535:open_checkpoint_id">>,<<"2">>}, {<<"vb_535:state">>,<<"active">>}, {<<"vb_534:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_534:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_534:checkpoint_extension">>,<<"false">>}, {<<"vb_534:num_items_for_persistence">>,<<"0">>}, {<<"vb_534:num_checkpoints">>,<<"1">>}, {<<"vb_534:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_534:num_checkpoint_items">>,<<"1">>}, {<<"vb_534:num_tap_cursors">>,<<"1">>}, {<<"vb_534:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_534:open_checkpoint_id">>,<<"2">>}, {<<"vb_534:state">>,<<"active">>}, {<<"vb_533:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_533:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_533:checkpoint_extension">>,<<"false">>}, {<<"vb_533:num_items_for_persistence">>,<<"0">>}, {<<"vb_533:num_checkpoints">>,<<"1">>}, {<<"vb_533:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_533:num_checkpoint_items">>,<<"1">>}, {<<"vb_533:num_tap_cursors">>,<<"1">>}, {<<"vb_533:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_533:open_checkpoint_id">>,<<"2">>}, {<<"vb_533:state">>,<<"active">>}, {<<"vb_532:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_532:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_532:checkpoint_extension">>,<<"false">>}, {<<"vb_532:num_items_for_persistence">>,<<"0">>}, {<<"vb_532:num_checkpoints">>,<<"1">>}, {<<"vb_532:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_532:num_checkpoint_items">>,<<"1">>}, {<<"vb_532:num_tap_cursors">>,<<"1">>}, {<<"vb_532:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_532:open_checkpoint_id">>,<<"2">>}, {<<"vb_532:state">>,<<"active">>}, {<<"vb_531:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_531:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_531:checkpoint_extension">>,<<"false">>}, {<<"vb_531:num_items_for_persistence">>,<<"0">>}, {<<"vb_531:num_checkpoints">>,<<"1">>}, {<<"vb_531:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_531:num_checkpoint_items">>,<<"1">>}, {<<"vb_531:num_tap_cursors">>,<<"1">>}, {<<"vb_531:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_531:open_checkpoint_id">>,<<"2">>}, {<<"vb_531:state">>,<<"active">>}, 
{<<"vb_530:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_530:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_530:checkpoint_extension">>,<<"false">>}, {<<"vb_530:num_items_for_persistence">>,<<"0">>}, {<<"vb_530:num_checkpoints">>,<<"1">>}, {<<"vb_530:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_530:num_checkpoint_items">>,<<"1">>}, {<<"vb_530:num_tap_cursors">>,<<"1">>}, {<<"vb_530:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_530:open_checkpoint_id">>,<<"2">>}, {<<"vb_530:state">>,<<"active">>}, {<<"vb_529:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_529:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_529:checkpoint_extension">>,<<"false">>}, {<<"vb_529:num_items_for_persistence">>,<<"0">>}, {<<"vb_529:num_checkpoints">>,<<"1">>}, {<<"vb_529:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_529:num_checkpoint_items">>,<<"1">>}, {<<"vb_529:num_tap_cursors">>,<<"1">>}, {<<"vb_529:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_529:open_checkpoint_id">>,<<"2">>}, {<<"vb_529:state">>,<<"active">>}, {<<"vb_528:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_528:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_528:checkpoint_extension">>,<<"false">>}, {<<"vb_528:num_items_for_persistence">>,<<"0">>}, {<<"vb_528:num_checkpoints">>,<<"1">>}, {<<"vb_528:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_528:num_checkpoint_items">>,<<"1">>}, {<<"vb_528:num_tap_cursors">>,<<"1">>}, {<<"vb_528:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_528:open_checkpoint_id">>,<<"2">>}, {<<"vb_528:state">>,<<"active">>}, {<<"vb_527:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_527:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_527:checkpoint_extension">>,<<"false">>}, {<<"vb_527:num_items_for_persistence">>,<<"0">>}, {<<"vb_527:num_checkpoints">>,<<"1">>}, {<<"vb_527:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_527:num_checkpoint_items">>,<<"1">>}, {<<"vb_527:num_tap_cursors">>,<<"1">>}, {<<"vb_527:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_527:open_checkpoint_id">>,<<"2">>}, {<<"vb_527:state">>,<<"active">>}, {<<"vb_526:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_526:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_526:checkpoint_extension">>,<<"false">>}, {<<"vb_526:num_items_for_persistence">>,<<"0">>}, {<<"vb_526:num_checkpoints">>,<<"1">>}, {<<"vb_526:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_526:num_checkpoint_items">>,<<"1">>}, {<<"vb_526:num_tap_cursors">>,<<"1">>}, {<<"vb_526:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_526:open_checkpoint_id">>,<<"2">>}, {<<"vb_526:state">>,<<"active">>}, {<<"vb_525:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_525:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_525:checkpoint_extension">>,<<"false">>}, {<<"vb_525:num_items_for_persistence">>,<<"0">>}, {<<"vb_525:num_checkpoints">>,<<"1">>}, {<<"vb_525:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_525:num_checkpoint_items">>,<<"1">>}, {<<"vb_525:num_tap_cursors">>,<<"1">>}, {<<"vb_525:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_525:open_checkpoint_id">>,<<"2">>}, {<<"vb_525:state">>,<<"active">>}, {<<"vb_524:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_524:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_524:checkpoint_extension">>,<<"false">>}, {<<"vb_524:num_items_for_persistence">>,<<"0">>}, {<<"vb_524:num_checkpoints">>,<<"1">>}, {<<"vb_524:num_open_checkpoint_items">>,<<"0">>}, 
{<<"vb_524:num_checkpoint_items">>,<<"1">>}, {<<"vb_524:num_tap_cursors">>,<<"1">>}, {<<"vb_524:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_524:open_checkpoint_id">>,<<"2">>}, {<<"vb_524:state">>,<<"active">>}, {<<"vb_523:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_523:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_523:checkpoint_extension">>,<<"false">>}, {<<"vb_523:num_items_for_persistence">>,<<"0">>}, {<<"vb_523:num_checkpoints">>,<<"1">>}, {<<"vb_523:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_523:num_checkpoint_items">>,<<"1">>}, {<<"vb_523:num_tap_cursors">>,<<"1">>}, {<<"vb_523:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_523:open_checkpoint_id">>,<<"2">>}, {<<"vb_523:state">>,<<"active">>}, {<<"vb_522:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_522:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_522:checkpoint_extension">>,<<"false">>}, {<<"vb_522:num_items_for_persistence">>,<<"0">>}, {<<"vb_522:num_checkpoints">>,<<"1">>}, {<<"vb_522:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_522:num_checkpoint_items">>,<<"1">>}, {<<"vb_522:num_tap_cursors">>,<<"1">>}, {<<"vb_522:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_522:open_checkpoint_id">>,<<"2">>}, {<<"vb_522:state">>,<<"active">>}, {<<"vb_521:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_521:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_521:checkpoint_extension">>,<<"false">>}, {<<"vb_521:num_items_for_persistence">>,<<"0">>}, {<<"vb_521:num_checkpoints">>,<<"1">>}, {<<"vb_521:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_521:num_checkpoint_items">>,<<"1">>}, {<<"vb_521:num_tap_cursors">>,<<"1">>}, {<<"vb_521:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_521:open_checkpoint_id">>,<<"2">>}, {<<"vb_521:state">>,<<"active">>}, {<<"vb_520:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_520:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_520:checkpoint_extension">>,<<"false">>}, {<<"vb_520:num_items_for_persistence">>,<<"0">>}, {<<"vb_520:num_checkpoints">>,<<"1">>}, {<<"vb_520:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_520:num_checkpoint_items">>,<<"1">>}, {<<"vb_520:num_tap_cursors">>,<<"1">>}, {<<"vb_520:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_520:open_checkpoint_id">>,<<"2">>}, {<<"vb_520:state">>,<<"active">>}, {<<"vb_519:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_519:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_519:checkpoint_extension">>,<<"false">>}, {<<"vb_519:num_items_for_persistence">>,<<"0">>}, {<<"vb_519:num_checkpoints">>,<<"1">>}, {<<"vb_519:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_519:num_checkpoint_items">>,<<"1">>}, {<<"vb_519:num_tap_cursors">>,<<"1">>}, {<<"vb_519:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_519:open_checkpoint_id">>,<<"2">>}, {<<"vb_519:state">>,<<"active">>}, {<<"vb_518:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_518:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_518:checkpoint_extension">>,<<"false">>}, {<<"vb_518:num_items_for_persistence">>,<<"0">>}, {<<"vb_518:num_checkpoints">>,<<"1">>}, {<<"vb_518:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_518:num_checkpoint_items">>,<<"1">>}, {<<"vb_518:num_tap_cursors">>,<<"1">>}, {<<"vb_518:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_518:open_checkpoint_id">>,<<"2">>}, {<<"vb_518:state">>,<<"active">>}, {<<"vb_517:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_517:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, 
<<"2">>}, {<<"vb_517:checkpoint_extension">>,<<"false">>}, {<<"vb_517:num_items_for_persistence">>,<<"0">>}, {<<"vb_517:num_checkpoints">>,<<"1">>}, {<<"vb_517:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_517:num_checkpoint_items">>,<<"1">>}, {<<"vb_517:num_tap_cursors">>,<<"1">>}, {<<"vb_517:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_517:open_checkpoint_id">>,<<"2">>}, {<<"vb_517:state">>,<<"active">>}, {<<"vb_516:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_516:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_516:checkpoint_extension">>,<<"false">>}, {<<"vb_516:num_items_for_persistence">>,<<"0">>}, {<<"vb_516:num_checkpoints">>,<<"1">>}, {<<"vb_516:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_516:num_checkpoint_items">>,<<"1">>}, {<<"vb_516:num_tap_cursors">>,<<"1">>}, {<<"vb_516:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_516:open_checkpoint_id">>,<<"2">>}, {<<"vb_516:state">>,<<"active">>}, {<<"vb_515:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_515:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_515:checkpoint_extension">>,<<"false">>}, {<<"vb_515:num_items_for_persistence">>,<<"0">>}, {<<"vb_515:num_checkpoints">>,<<"1">>}, {<<"vb_515:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_515:num_checkpoint_items">>,<<"1">>}, {<<"vb_515:num_tap_cursors">>,<<"1">>}, {<<"vb_515:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_515:open_checkpoint_id">>,<<"2">>}, {<<"vb_515:state">>,<<"active">>}, {<<"vb_514:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_514:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_514:checkpoint_extension">>,<<"false">>}, {<<"vb_514:num_items_for_persistence">>,<<"0">>}, {<<"vb_514:num_checkpoints">>,<<"1">>}, {<<"vb_514:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_514:num_checkpoint_items">>,<<"1">>}, {<<"vb_514:num_tap_cursors">>,<<"1">>}, {<<"vb_514:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_514:open_checkpoint_id">>,<<"2">>}, {<<"vb_514:state">>,<<"active">>}, {<<"vb_513:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_513:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_513:checkpoint_extension">>,<<"false">>}, {<<"vb_513:num_items_for_persistence">>,<<"0">>}, {<<"vb_513:num_checkpoints">>,<<"1">>}, {<<"vb_513:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_513:num_checkpoint_items">>,<<"1">>}, {<<"vb_513:num_tap_cursors">>,<<"1">>}, {<<"vb_513:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_513:open_checkpoint_id">>,<<"2">>}, {<<"vb_513:state">>,<<"active">>}, {<<"vb_512:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_512:eq_tapq:replication_ns_1@10.242.238.88:cursor_checkpoint_id">>, <<"2">>}, {<<"vb_512:checkpoint_extension">>,<<"false">>}, {<<"vb_512:num_items_for_persistence">>,<<"0">>}, {<<"vb_512:num_checkpoints">>,<<"1">>}, {<<"vb_512:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_512:num_checkpoint_items">>,<<"1">>}, {<<"vb_512:num_tap_cursors">>,<<"1">>}, {<<"vb_512:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_512:open_checkpoint_id">>,<<"2">>}, {<<"vb_512:state">>,<<"active">>}, {<<"vb_426:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_426:checkpoint_extension">>,<<"false">>}, {<<"vb_426:num_items_for_persistence">>,<<"0">>}, {<<"vb_426:num_checkpoints">>,<<"1">>}, {<<"vb_426:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_426:num_checkpoint_items">>,<<"1">>}, {<<"vb_426:num_tap_cursors">>,<<"0">>}, {<<"vb_426:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_426:open_checkpoint_id">>,<<"2">>}, {<<"vb_426:state">>,<<"replica">>}, 
{<<"vb_425:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_425:checkpoint_extension">>,<<"false">>}, {<<"vb_425:num_items_for_persistence">>,<<"0">>}, {<<"vb_425:num_checkpoints">>,<<"1">>}, {<<"vb_425:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_425:num_checkpoint_items">>,<<"1">>}, {<<"vb_425:num_tap_cursors">>,<<"0">>}, {<<"vb_425:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_425:open_checkpoint_id">>,<<"2">>}, {<<"vb_425:state">>,<<"replica">>}, {<<"vb_424:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_424:checkpoint_extension">>,<<"false">>}, {<<"vb_424:num_items_for_persistence">>,<<"0">>}, {<<"vb_424:num_checkpoints">>,<<"1">>}, {<<"vb_424:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_424:num_checkpoint_items">>,<<"1">>}, {<<"vb_424:num_tap_cursors">>,<<"0">>}, {<<"vb_424:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_424:open_checkpoint_id">>,<<"2">>}, {<<"vb_424:state">>,<<"replica">>}, {<<"vb_423:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_423:checkpoint_extension">>,<<"false">>}, {<<"vb_423:num_items_for_persistence">>,<<"0">>}, {<<"vb_423:num_checkpoints">>,<<"1">>}, {<<"vb_423:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_423:num_checkpoint_items">>,<<"1">>}, {<<"vb_423:num_tap_cursors">>,<<"0">>}, {<<"vb_423:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_423:open_checkpoint_id">>,<<"2">>}, {<<"vb_423:state">>,<<"replica">>}, {<<"vb_422:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_422:checkpoint_extension">>,<<"false">>}, {<<"vb_422:num_items_for_persistence">>,<<"0">>}, {<<"vb_422:num_checkpoints">>,<<"1">>}, {<<"vb_422:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_422:num_checkpoint_items">>,<<"1">>}, {<<"vb_422:num_tap_cursors">>,<<"0">>}, {<<"vb_422:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_422:open_checkpoint_id">>,<<"2">>}, {<<"vb_422:state">>,<<"replica">>}, {<<"vb_421:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_421:checkpoint_extension">>,<<"false">>}, {<<"vb_421:num_items_for_persistence">>,<<"0">>}, {<<"vb_421:num_checkpoints">>,<<"1">>}, {<<"vb_421:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_421:num_checkpoint_items">>,<<"1">>}, {<<"vb_421:num_tap_cursors">>,<<"0">>}, {<<"vb_421:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_421:open_checkpoint_id">>,<<"2">>}, {<<"vb_421:state">>,<<"replica">>}, {<<"vb_420:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_420:checkpoint_extension">>,<<"false">>}, {<<"vb_420:num_items_for_persistence">>,<<"0">>}, {<<"vb_420:num_checkpoints">>,<<"1">>}, {<<"vb_420:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_420:num_checkpoint_items">>,<<"1">>}, {<<"vb_420:num_tap_cursors">>,<<"0">>}, {<<"vb_420:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_420:open_checkpoint_id">>,<<"2">>}, {<<"vb_420:state">>,<<"replica">>}, {<<"vb_419:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_419:checkpoint_extension">>,<<"false">>}, {<<"vb_419:num_items_for_persistence">>,<<"0">>}, {<<"vb_419:num_checkpoints">>,<<"1">>}, {<<"vb_419:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_419:num_checkpoint_items">>,<<"1">>}, {<<"vb_419:num_tap_cursors">>,<<"0">>}, {<<"vb_419:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_419:open_checkpoint_id">>,<<"2">>}, {<<"vb_419:state">>,<<"replica">>}, {<<"vb_418:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_418:checkpoint_extension">>,<<"false">>}, {<<"vb_418:num_items_for_persistence">>,<<"0">>}, {<<"vb_418:num_checkpoints">>,<<"1">>}, {<<"vb_418:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_418:num_checkpoint_items">>,<<"1">>}, {<<"vb_418:num_tap_cursors">>,<<"0">>}, {<<"vb_418:last_closed_checkpoint_id">>,<<"1">>}, 
{<<"vb_418:open_checkpoint_id">>,<<"2">>}, {<<"vb_418:state">>,<<"replica">>}, {<<"vb_417:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_417:checkpoint_extension">>,<<"false">>}, {<<"vb_417:num_items_for_persistence">>,<<"0">>}, {<<"vb_417:num_checkpoints">>,<<"1">>}, {<<"vb_417:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_417:num_checkpoint_items">>,<<"1">>}, {<<"vb_417:num_tap_cursors">>,<<"0">>}, {<<"vb_417:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_417:open_checkpoint_id">>,<<"2">>}, {<<"vb_417:state">>,<<"replica">>}, {<<"vb_416:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_416:checkpoint_extension">>,<<"false">>}, {<<"vb_416:num_items_for_persistence">>,<<"0">>}, {<<"vb_416:num_checkpoints">>,<<"1">>}, {<<"vb_416:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_416:num_checkpoint_items">>,<<"1">>}, {<<"vb_416:num_tap_cursors">>,<<"0">>}, {<<"vb_416:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_416:open_checkpoint_id">>,<<"2">>}, {<<"vb_416:state">>,<<"replica">>}, {<<"vb_415:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_415:checkpoint_extension">>,<<"false">>}, {<<"vb_415:num_items_for_persistence">>,<<"0">>}, {<<"vb_415:num_checkpoints">>,<<"1">>}, {<<"vb_415:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_415:num_checkpoint_items">>,<<"1">>}, {<<"vb_415:num_tap_cursors">>,<<"0">>}, {<<"vb_415:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_415:open_checkpoint_id">>,<<"2">>}, {<<"vb_415:state">>,<<"replica">>}, {<<"vb_414:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_414:checkpoint_extension">>,<<"false">>}, {<<"vb_414:num_items_for_persistence">>,<<"0">>}, {<<"vb_414:num_checkpoints">>,<<"1">>}, {<<"vb_414:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_414:num_checkpoint_items">>,<<"1">>}, {<<"vb_414:num_tap_cursors">>,<<"0">>}, {<<"vb_414:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_414:open_checkpoint_id">>,<<"2">>}, {<<"vb_414:state">>,<<"replica">>}, {<<"vb_413:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_413:checkpoint_extension">>,<<"false">>}, {<<"vb_413:num_items_for_persistence">>,<<"0">>}, {<<"vb_413:num_checkpoints">>,<<"1">>}, {<<"vb_413:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_413:num_checkpoint_items">>,<<"1">>}, {<<"vb_413:num_tap_cursors">>,<<"0">>}, {<<"vb_413:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_413:open_checkpoint_id">>,<<"2">>}, {<<"vb_413:state">>,<<"replica">>}, {<<"vb_412:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_412:checkpoint_extension">>,<<"false">>}, {<<"vb_412:num_items_for_persistence">>,<<"0">>}, {<<"vb_412:num_checkpoints">>,<<"1">>}, {<<"vb_412:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_412:num_checkpoint_items">>,<<"1">>}, {<<"vb_412:num_tap_cursors">>,<<"0">>}, {<<"vb_412:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_412:open_checkpoint_id">>,<<"2">>}, {<<"vb_412:state">>,<<"replica">>}, {<<"vb_411:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_411:checkpoint_extension">>,<<"false">>}, {<<"vb_411:num_items_for_persistence">>,<<"0">>}, {<<"vb_411:num_checkpoints">>,<<"1">>}, {<<"vb_411:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_411:num_checkpoint_items">>,<<"1">>}, {<<"vb_411:num_tap_cursors">>,<<"0">>}, {<<"vb_411:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_411:open_checkpoint_id">>,<<"2">>}, {<<"vb_411:state">>,<<"replica">>}, {<<"vb_410:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_410:checkpoint_extension">>,<<"false">>}, {<<"vb_410:num_items_for_persistence">>,<<"0">>}, {<<"vb_410:num_checkpoints">>,<<"1">>}, {<<"vb_410:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_410:num_checkpoint_items">>,<<"1">>}, 
{<<"vb_410:num_tap_cursors">>,<<"0">>}, {<<"vb_410:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_410:open_checkpoint_id">>,<<"2">>}, {<<"vb_410:state">>,<<"replica">>}, {<<"vb_409:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_409:checkpoint_extension">>,<<"false">>}, {<<"vb_409:num_items_for_persistence">>,<<"0">>}, {<<"vb_409:num_checkpoints">>,<<"1">>}, {<<"vb_409:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_409:num_checkpoint_items">>,<<"1">>}, {<<"vb_409:num_tap_cursors">>,<<"0">>}, {<<"vb_409:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_409:open_checkpoint_id">>,<<"2">>}, {<<"vb_409:state">>,<<"replica">>}, {<<"vb_408:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_408:checkpoint_extension">>,<<"false">>}, {<<"vb_408:num_items_for_persistence">>,<<"0">>}, {<<"vb_408:num_checkpoints">>,<<"1">>}, {<<"vb_408:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_408:num_checkpoint_items">>,<<"1">>}, {<<"vb_408:num_tap_cursors">>,<<"0">>}, {<<"vb_408:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_408:open_checkpoint_id">>,<<"2">>}, {<<"vb_408:state">>,<<"replica">>}, {<<"vb_407:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_407:checkpoint_extension">>,<<"false">>}, {<<"vb_407:num_items_for_persistence">>,<<"0">>}, {<<"vb_407:num_checkpoints">>,<<"1">>}, {<<"vb_407:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_407:num_checkpoint_items">>,<<"1">>}, {<<"vb_407:num_tap_cursors">>,<<"0">>}, {<<"vb_407:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_407:open_checkpoint_id">>,<<"2">>}, {<<"vb_407:state">>,<<"replica">>}, {<<"vb_406:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_406:checkpoint_extension">>,<<"false">>}, {<<"vb_406:num_items_for_persistence">>,<<"0">>}, {<<"vb_406:num_checkpoints">>,<<"1">>}, {<<"vb_406:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_406:num_checkpoint_items">>,<<"1">>}, {<<"vb_406:num_tap_cursors">>,<<"0">>}, {<<"vb_406:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_406:open_checkpoint_id">>,<<"2">>}, {<<"vb_406:state">>,<<"replica">>}, {<<"vb_405:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_405:checkpoint_extension">>,<<"false">>}, {<<"vb_405:num_items_for_persistence">>,<<"0">>}, {<<"vb_405:num_checkpoints">>,<<"1">>}, {<<"vb_405:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_405:num_checkpoint_items">>,<<"1">>}, {<<"vb_405:num_tap_cursors">>,<<"0">>}, {<<"vb_405:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_405:open_checkpoint_id">>,<<"2">>}, {<<"vb_405:state">>,<<"replica">>}, {<<"vb_404:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_404:checkpoint_extension">>,<<"false">>}, {<<"vb_404:num_items_for_persistence">>,<<"0">>}, {<<"vb_404:num_checkpoints">>,<<"1">>}, {<<"vb_404:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_404:num_checkpoint_items">>,<<"1">>}, {<<"vb_404:num_tap_cursors">>,<<"0">>}, {<<"vb_404:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_404:open_checkpoint_id">>,<<"2">>}, {<<"vb_404:state">>,<<"replica">>}, {<<"vb_403:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_403:checkpoint_extension">>,<<"false">>}, {<<"vb_403:num_items_for_persistence">>,<<"0">>}, {<<"vb_403:num_checkpoints">>,<<"1">>}, {<<"vb_403:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_403:num_checkpoint_items">>,<<"1">>}, {<<"vb_403:num_tap_cursors">>,<<"0">>}, {<<"vb_403:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_403:open_checkpoint_id">>,<<"2">>}, {<<"vb_403:state">>,<<"replica">>}, {<<"vb_402:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_402:checkpoint_extension">>,<<"false">>}, {<<"vb_402:num_items_for_persistence">>,<<"0">>}, {<<"vb_402:num_checkpoints">>,<<"1">>}, 
{<<"vb_402:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_402:num_checkpoint_items">>,<<"1">>}, {<<"vb_402:num_tap_cursors">>,<<"0">>}, {<<"vb_402:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_402:open_checkpoint_id">>,<<"2">>}, {<<"vb_402:state">>,<<"replica">>}, {<<"vb_401:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_401:checkpoint_extension">>,<<"false">>}, {<<"vb_401:num_items_for_persistence">>,<<"0">>}, {<<"vb_401:num_checkpoints">>,<<"1">>}, {<<"vb_401:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_401:num_checkpoint_items">>,<<"1">>}, {<<"vb_401:num_tap_cursors">>,<<"0">>}, {<<"vb_401:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_401:open_checkpoint_id">>,<<"2">>}, {<<"vb_401:state">>,<<"replica">>}, {<<"vb_400:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_400:checkpoint_extension">>,<<"false">>}, {<<"vb_400:num_items_for_persistence">>,<<"0">>}, {<<"vb_400:num_checkpoints">>,<<"1">>}, {<<"vb_400:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_400:num_checkpoint_items">>,<<"1">>}, {<<"vb_400:num_tap_cursors">>,<<"0">>}, {<<"vb_400:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_400:open_checkpoint_id">>,<<"2">>}, {<<"vb_400:state">>,<<"replica">>}, {<<"vb_399:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_399:checkpoint_extension">>,<<"false">>}, {<<"vb_399:num_items_for_persistence">>,<<"0">>}, {<<"vb_399:num_checkpoints">>,<<"1">>}, {<<"vb_399:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_399:num_checkpoint_items">>,<<"1">>}, {<<"vb_399:num_tap_cursors">>,<<"0">>}, {<<"vb_399:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_399:open_checkpoint_id">>,<<"2">>}, {<<"vb_399:state">>,<<"replica">>}, {<<"vb_398:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_398:checkpoint_extension">>,<<"false">>}, {<<"vb_398:num_items_for_persistence">>,<<"0">>}, {<<"vb_398:num_checkpoints">>,<<"1">>}, {<<"vb_398:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_398:num_checkpoint_items">>,<<"1">>}, {<<"vb_398:num_tap_cursors">>,<<"0">>}, {<<"vb_398:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_398:open_checkpoint_id">>,<<"2">>}, {<<"vb_398:state">>,<<"replica">>}, {<<"vb_397:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_397:checkpoint_extension">>,<<"false">>}, {<<"vb_397:num_items_for_persistence">>,<<"0">>}, {<<"vb_397:num_checkpoints">>,<<"1">>}, {<<"vb_397:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_397:num_checkpoint_items">>,<<"1">>}, {<<"vb_397:num_tap_cursors">>,<<"0">>}, {<<"vb_397:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_397:open_checkpoint_id">>,<<"2">>}, {<<"vb_397:state">>,<<"replica">>}, {<<"vb_396:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_396:checkpoint_extension">>,<<"false">>}, {<<"vb_396:num_items_for_persistence">>,<<"0">>}, {<<"vb_396:num_checkpoints">>,<<"1">>}, {<<"vb_396:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_396:num_checkpoint_items">>,<<"1">>}, {<<"vb_396:num_tap_cursors">>,<<"0">>}, {<<"vb_396:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_396:open_checkpoint_id">>,<<"2">>}, {<<"vb_396:state">>,<<"replica">>}, {<<"vb_395:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_395:checkpoint_extension">>,<<"false">>}, {<<"vb_395:num_items_for_persistence">>,<<"0">>}, {<<"vb_395:num_checkpoints">>,<<"1">>}, {<<"vb_395:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_395:num_checkpoint_items">>,<<"1">>}, {<<"vb_395:num_tap_cursors">>,<<"0">>}, {<<"vb_395:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_395:open_checkpoint_id">>,<<"2">>}, {<<"vb_395:state">>,<<"replica">>}, {<<"vb_394:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_394:checkpoint_extension">>,<<"false">>}, 
{<<"vb_394:num_items_for_persistence">>,<<"0">>}, {<<"vb_394:num_checkpoints">>,<<"1">>}, {<<"vb_394:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_394:num_checkpoint_items">>,<<"1">>}, {<<"vb_394:num_tap_cursors">>,<<"0">>}, {<<"vb_394:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_394:open_checkpoint_id">>,<<"2">>}, {<<"vb_394:state">>,<<"replica">>}, {<<"vb_393:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_393:checkpoint_extension">>,<<"false">>}, {<<"vb_393:num_items_for_persistence">>,<<"0">>}, {<<"vb_393:num_checkpoints">>,<<"1">>}, {<<"vb_393:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_393:num_checkpoint_items">>,<<"1">>}, {<<"vb_393:num_tap_cursors">>,<<"0">>}, {<<"vb_393:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_393:open_checkpoint_id">>,<<"2">>}, {<<"vb_393:state">>,<<"replica">>}, {<<"vb_392:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_392:checkpoint_extension">>,<<"false">>}, {<<"vb_392:num_items_for_persistence">>,<<"0">>}, {<<"vb_392:num_checkpoints">>,<<"1">>}, {<<"vb_392:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_392:num_checkpoint_items">>,<<"1">>}, {<<"vb_392:num_tap_cursors">>,<<"0">>}, {<<"vb_392:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_392:open_checkpoint_id">>,<<"2">>}, {<<"vb_392:state">>,<<"replica">>}, {<<"vb_391:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_391:checkpoint_extension">>,<<"false">>}, {<<"vb_391:num_items_for_persistence">>,<<"0">>}, {<<"vb_391:num_checkpoints">>,<<"1">>}, {<<"vb_391:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_391:num_checkpoint_items">>,<<"1">>}, {<<"vb_391:num_tap_cursors">>,<<"0">>}, {<<"vb_391:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_391:open_checkpoint_id">>,<<"2">>}, {<<"vb_391:state">>,<<"replica">>}, {<<"vb_390:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_390:checkpoint_extension">>,<<"false">>}, {<<"vb_390:num_items_for_persistence">>,<<"0">>}, {<<"vb_390:num_checkpoints">>,<<"1">>}, {<<"vb_390:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_390:num_checkpoint_items">>,<<"1">>}, {<<"vb_390:num_tap_cursors">>,<<"0">>}, {<<"vb_390:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_390:open_checkpoint_id">>,<<"2">>}, {<<"vb_390:state">>,<<"replica">>}, {<<"vb_389:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_389:checkpoint_extension">>,<<"false">>}, {<<"vb_389:num_items_for_persistence">>,<<"0">>}, {<<"vb_389:num_checkpoints">>,<<"1">>}, {<<"vb_389:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_389:num_checkpoint_items">>,<<"1">>}, {<<"vb_389:num_tap_cursors">>,<<"0">>}, {<<"vb_389:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_389:open_checkpoint_id">>,<<"2">>}, {<<"vb_389:state">>,<<"replica">>}, {<<"vb_388:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_388:checkpoint_extension">>,<<"false">>}, {<<"vb_388:num_items_for_persistence">>,<<"0">>}, {<<"vb_388:num_checkpoints">>,<<"1">>}, {<<"vb_388:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_388:num_checkpoint_items">>,<<"1">>}, {<<"vb_388:num_tap_cursors">>,<<"0">>}, {<<"vb_388:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_388:open_checkpoint_id">>,<<"2">>}, {<<"vb_388:state">>,<<"replica">>}, {<<"vb_387:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_387:checkpoint_extension">>,<<"false">>}, {<<"vb_387:num_items_for_persistence">>,<<"0">>}, {<<"vb_387:num_checkpoints">>,<<"1">>}, {<<"vb_387:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_387:num_checkpoint_items">>,<<"1">>}, {<<"vb_387:num_tap_cursors">>,<<"0">>}, {<<"vb_387:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_387:open_checkpoint_id">>,<<"2">>}, {<<"vb_387:state">>,<<"replica">>}, 
{<<"vb_386:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_386:checkpoint_extension">>,<<"false">>}, {<<"vb_386:num_items_for_persistence">>,<<"0">>}, {<<"vb_386:num_checkpoints">>,<<"1">>}, {<<"vb_386:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_386:num_checkpoint_items">>,<<"1">>}, {<<"vb_386:num_tap_cursors">>,<<"0">>}, {<<"vb_386:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_386:open_checkpoint_id">>,<<"2">>}, {<<"vb_386:state">>,<<"replica">>}, {<<"vb_385:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_385:checkpoint_extension">>,<<"false">>}, {<<"vb_385:num_items_for_persistence">>,<<"0">>}, {<<"vb_385:num_checkpoints">>,<<"1">>}, {<<"vb_385:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_385:num_checkpoint_items">>,<<"1">>}, {<<"vb_385:num_tap_cursors">>,<<"0">>}, {<<"vb_385:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_385:open_checkpoint_id">>,<<"2">>}, {<<"vb_385:state">>,<<"replica">>}, {<<"vb_384:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_384:checkpoint_extension">>,<<"false">>}, {<<"vb_384:num_items_for_persistence">>,<<"0">>}, {<<"vb_384:num_checkpoints">>,<<"1">>}, {<<"vb_384:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_384:num_checkpoint_items">>,<<"1">>}, {<<"vb_384:num_tap_cursors">>,<<"0">>}, {<<"vb_384:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_384:open_checkpoint_id">>,<<"2">>}, {<<"vb_384:state">>,<<"replica">>}, {<<"vb_383:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_383:checkpoint_extension">>,<<"false">>}, {<<"vb_383:num_items_for_persistence">>,<<"0">>}, {<<"vb_383:num_checkpoints">>,<<"1">>}, {<<"vb_383:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_383:num_checkpoint_items">>,<<"1">>}, {<<"vb_383:num_tap_cursors">>,<<"0">>}, {<<"vb_383:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_383:open_checkpoint_id">>,<<"2">>}, {<<"vb_383:state">>,<<"replica">>}, {<<"vb_382:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_382:checkpoint_extension">>,<<"false">>}, {<<"vb_382:num_items_for_persistence">>,<<"0">>}, {<<"vb_382:num_checkpoints">>,<<"1">>}, {<<"vb_382:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_382:num_checkpoint_items">>,<<"1">>}, {<<"vb_382:num_tap_cursors">>,<<"0">>}, {<<"vb_382:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_382:open_checkpoint_id">>,<<"2">>}, {<<"vb_382:state">>,<<"replica">>}, {<<"vb_381:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_381:checkpoint_extension">>,<<"false">>}, {<<"vb_381:num_items_for_persistence">>,<<"0">>}, {<<"vb_381:num_checkpoints">>,<<"1">>}, {<<"vb_381:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_381:num_checkpoint_items">>,<<"1">>}, {<<"vb_381:num_tap_cursors">>,<<"0">>}, {<<"vb_381:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_381:open_checkpoint_id">>,<<"2">>}, {<<"vb_381:state">>,<<"replica">>}, {<<"vb_380:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_380:checkpoint_extension">>,<<"false">>}, {<<"vb_380:num_items_for_persistence">>,<<"0">>}, {<<"vb_380:num_checkpoints">>,<<"1">>}, {<<"vb_380:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_380:num_checkpoint_items">>,<<"1">>}, {<<"vb_380:num_tap_cursors">>,<<"0">>}, {<<"vb_380:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_380:open_checkpoint_id">>,<<"2">>}, {<<"vb_380:state">>,<<"replica">>}, {<<"vb_379:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_379:checkpoint_extension">>,<<"false">>}, {<<"vb_379:num_items_for_persistence">>,<<"0">>}, {<<"vb_379:num_checkpoints">>,<<"1">>}, {<<"vb_379:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_379:num_checkpoint_items">>,<<"1">>}, {<<"vb_379:num_tap_cursors">>,<<"0">>}, {<<"vb_379:last_closed_checkpoint_id">>,<<"1">>}, 
{<<"vb_379:open_checkpoint_id">>,<<"2">>}, {<<"vb_379:state">>,<<"replica">>}, {<<"vb_378:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_378:checkpoint_extension">>,<<"false">>}, {<<"vb_378:num_items_for_persistence">>,<<"0">>}, {<<"vb_378:num_checkpoints">>,<<"1">>}, {<<"vb_378:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_378:num_checkpoint_items">>,<<"1">>}, {<<"vb_378:num_tap_cursors">>,<<"0">>}, {<<"vb_378:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_378:open_checkpoint_id">>,<<"2">>}, {<<"vb_378:state">>,<<"replica">>}, {<<"vb_377:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_377:checkpoint_extension">>,<<"false">>}, {<<"vb_377:num_items_for_persistence">>,<<"0">>}, {<<"vb_377:num_checkpoints">>,<<"1">>}, {<<"vb_377:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_377:num_checkpoint_items">>,<<"1">>}, {<<"vb_377:num_tap_cursors">>,<<"0">>}, {<<"vb_377:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_377:open_checkpoint_id">>,<<"2">>}, {<<"vb_377:state">>,<<"replica">>}, {<<"vb_376:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_376:checkpoint_extension">>,<<"false">>}, {<<"vb_376:num_items_for_persistence">>,<<"0">>}, {<<"vb_376:num_checkpoints">>,<<"1">>}, {<<"vb_376:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_376:num_checkpoint_items">>,<<"1">>}, {<<"vb_376:num_tap_cursors">>,<<"0">>}, {<<"vb_376:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_376:open_checkpoint_id">>,<<"2">>}, {<<"vb_376:state">>,<<"replica">>}, {<<"vb_375:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_375:checkpoint_extension">>,<<"false">>}, {<<"vb_375:num_items_for_persistence">>,<<"0">>}, {<<"vb_375:num_checkpoints">>,<<"1">>}, {<<"vb_375:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_375:num_checkpoint_items">>,<<"1">>}, {<<"vb_375:num_tap_cursors">>,<<"0">>}, {<<"vb_375:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_375:open_checkpoint_id">>,<<"2">>}, {<<"vb_375:state">>,<<"replica">>}, {<<"vb_374:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_374:checkpoint_extension">>,<<"false">>}, {<<"vb_374:num_items_for_persistence">>,<<"0">>}, {<<"vb_374:num_checkpoints">>,<<"1">>}, {<<"vb_374:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_374:num_checkpoint_items">>,<<"1">>}, {<<"vb_374:num_tap_cursors">>,<<"0">>}, {<<"vb_374:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_374:open_checkpoint_id">>,<<"2">>}, {<<"vb_374:state">>,<<"replica">>}, {<<"vb_373:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_373:checkpoint_extension">>,<<"false">>}, {<<"vb_373:num_items_for_persistence">>,<<"0">>}, {<<"vb_373:num_checkpoints">>,<<"1">>}, {<<"vb_373:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_373:num_checkpoint_items">>,<<"1">>}, {<<"vb_373:num_tap_cursors">>,<<"0">>}, {<<"vb_373:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_373:open_checkpoint_id">>,<<"2">>}, {<<"vb_373:state">>,<<"replica">>}, {<<"vb_372:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_372:checkpoint_extension">>,<<"false">>}, {<<"vb_372:num_items_for_persistence">>,<<"0">>}, {<<"vb_372:num_checkpoints">>,<<"1">>}, {<<"vb_372:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_372:num_checkpoint_items">>,<<"1">>}, {<<"vb_372:num_tap_cursors">>,<<"0">>}, {<<"vb_372:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_372:open_checkpoint_id">>,<<"2">>}, {<<"vb_372:state">>,<<"replica">>}, {<<"vb_371:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_371:checkpoint_extension">>,<<"false">>}, {<<"vb_371:num_items_for_persistence">>,<<"0">>}, {<<"vb_371:num_checkpoints">>,<<"1">>}, {<<"vb_371:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_371:num_checkpoint_items">>,<<"1">>}, 
{<<"vb_371:num_tap_cursors">>,<<"0">>}, {<<"vb_371:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_371:open_checkpoint_id">>,<<"2">>}, {<<"vb_371:state">>,<<"replica">>}, {<<"vb_370:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_370:checkpoint_extension">>,<<"false">>}, {<<"vb_370:num_items_for_persistence">>,<<"0">>}, {<<"vb_370:num_checkpoints">>,<<"1">>}, {<<"vb_370:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_370:num_checkpoint_items">>,<<"1">>}, {<<"vb_370:num_tap_cursors">>,<<"0">>}, {<<"vb_370:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_370:open_checkpoint_id">>,<<"2">>}, {<<"vb_370:state">>,<<"replica">>}, {<<"vb_369:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_369:checkpoint_extension">>,<<"false">>}, {<<"vb_369:num_items_for_persistence">>,<<"0">>}, {<<"vb_369:num_checkpoints">>,<<"1">>}, {<<"vb_369:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_369:num_checkpoint_items">>,<<"1">>}, {<<"vb_369:num_tap_cursors">>,<<"0">>}, {<<"vb_369:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_369:open_checkpoint_id">>,<<"2">>}, {<<"vb_369:state">>,<<"replica">>}, {<<"vb_368:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_368:checkpoint_extension">>,<<"false">>}, {<<"vb_368:num_items_for_persistence">>,<<"0">>}, {<<"vb_368:num_checkpoints">>,<<"1">>}, {<<"vb_368:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_368:num_checkpoint_items">>,<<"1">>}, {<<"vb_368:num_tap_cursors">>,<<"0">>}, {<<"vb_368:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_368:open_checkpoint_id">>,<<"2">>}, {<<"vb_368:state">>,<<"replica">>}, {<<"vb_367:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_367:checkpoint_extension">>,<<"false">>}, {<<"vb_367:num_items_for_persistence">>,<<"0">>}, {<<"vb_367:num_checkpoints">>,<<"1">>}, {<<"vb_367:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_367:num_checkpoint_items">>,<<"1">>}, {<<"vb_367:num_tap_cursors">>,<<"0">>}, {<<"vb_367:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_367:open_checkpoint_id">>,<<"2">>}, {<<"vb_367:state">>,<<"replica">>}, {<<"vb_366:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_366:checkpoint_extension">>,<<"false">>}, {<<"vb_366:num_items_for_persistence">>,<<"0">>}, {<<"vb_366:num_checkpoints">>,<<"1">>}, {<<"vb_366:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_366:num_checkpoint_items">>,<<"1">>}, {<<"vb_366:num_tap_cursors">>,<<"0">>}, {<<"vb_366:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_366:open_checkpoint_id">>,<<"2">>}, {<<"vb_366:state">>,<<"replica">>}, {<<"vb_365:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_365:checkpoint_extension">>,<<"false">>}, {<<"vb_365:num_items_for_persistence">>,<<"0">>}, {<<"vb_365:num_checkpoints">>,<<"1">>}, {<<"vb_365:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_365:num_checkpoint_items">>,<<"1">>}, {<<"vb_365:num_tap_cursors">>,<<"0">>}, {<<"vb_365:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_365:open_checkpoint_id">>,<<"2">>}, {<<"vb_365:state">>,<<"replica">>}, {<<"vb_364:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_364:checkpoint_extension">>,<<"false">>}, {<<"vb_364:num_items_for_persistence">>,<<"0">>}, {<<"vb_364:num_checkpoints">>,<<"1">>}, {<<"vb_364:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_364:num_checkpoint_items">>,<<"1">>}, {<<"vb_364:num_tap_cursors">>,<<"0">>}, {<<"vb_364:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_364:open_checkpoint_id">>,<<"2">>}, {<<"vb_364:state">>,<<"replica">>}, {<<"vb_363:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_363:checkpoint_extension">>,<<"false">>}, {<<"vb_363:num_items_for_persistence">>,<<"0">>}, {<<"vb_363:num_checkpoints">>,<<"1">>}, 
{<<"vb_363:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_363:num_checkpoint_items">>,<<"1">>}, {<<"vb_363:num_tap_cursors">>,<<"0">>}, {<<"vb_363:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_363:open_checkpoint_id">>,<<"2">>}, {<<"vb_363:state">>,<<"replica">>}, {<<"vb_362:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_362:checkpoint_extension">>,<<"false">>}, {<<"vb_362:num_items_for_persistence">>,<<"0">>}, {<<"vb_362:num_checkpoints">>,<<"1">>}, {<<"vb_362:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_362:num_checkpoint_items">>,<<"1">>}, {<<"vb_362:num_tap_cursors">>,<<"0">>}, {<<"vb_362:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_362:open_checkpoint_id">>,<<"2">>}, {<<"vb_362:state">>,<<"replica">>}, {<<"vb_361:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_361:checkpoint_extension">>,<<"false">>}, {<<"vb_361:num_items_for_persistence">>,<<"0">>}, {<<"vb_361:num_checkpoints">>,<<"1">>}, {<<"vb_361:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_361:num_checkpoint_items">>,<<"1">>}, {<<"vb_361:num_tap_cursors">>,<<"0">>}, {<<"vb_361:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_361:open_checkpoint_id">>,<<"2">>}, {<<"vb_361:state">>,<<"replica">>}, {<<"vb_360:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_360:checkpoint_extension">>,<<"false">>}, {<<"vb_360:num_items_for_persistence">>,<<"0">>}, {<<"vb_360:num_checkpoints">>,<<"1">>}, {<<"vb_360:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_360:num_checkpoint_items">>,<<"1">>}, {<<"vb_360:num_tap_cursors">>,<<"0">>}, {<<"vb_360:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_360:open_checkpoint_id">>,<<"2">>}, {<<"vb_360:state">>,<<"replica">>}, {<<"vb_359:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_359:checkpoint_extension">>,<<"false">>}, {<<"vb_359:num_items_for_persistence">>,<<"0">>}, {<<"vb_359:num_checkpoints">>,<<"1">>}, {<<"vb_359:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_359:num_checkpoint_items">>,<<"1">>}, {<<"vb_359:num_tap_cursors">>,<<"0">>}, {<<"vb_359:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_359:open_checkpoint_id">>,<<"2">>}, {<<"vb_359:state">>,<<"replica">>}, {<<"vb_358:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_358:checkpoint_extension">>,<<"false">>}, {<<"vb_358:num_items_for_persistence">>,<<"0">>}, {<<"vb_358:num_checkpoints">>,<<"1">>}, {<<"vb_358:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_358:num_checkpoint_items">>,<<"1">>}, {<<"vb_358:num_tap_cursors">>,<<"0">>}, {<<"vb_358:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_358:open_checkpoint_id">>,<<"2">>}, {<<"vb_358:state">>,<<"replica">>}, {<<"vb_357:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_357:checkpoint_extension">>,<<"false">>}, {<<"vb_357:num_items_for_persistence">>,<<"0">>}, {<<"vb_357:num_checkpoints">>,<<"1">>}, {<<"vb_357:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_357:num_checkpoint_items">>,<<"1">>}, {<<"vb_357:num_tap_cursors">>,<<"0">>}, {<<"vb_357:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_357:open_checkpoint_id">>,<<"2">>}, {<<"vb_357:state">>,<<"replica">>}, {<<"vb_356:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_356:checkpoint_extension">>,<<"false">>}, {<<"vb_356:num_items_for_persistence">>,<<"0">>}, {<<"vb_356:num_checkpoints">>,<<"1">>}, {<<"vb_356:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_356:num_checkpoint_items">>,<<"1">>}, {<<"vb_356:num_tap_cursors">>,<<"0">>}, {<<"vb_356:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_356:open_checkpoint_id">>,<<"2">>}, {<<"vb_356:state">>,<<"replica">>}, {<<"vb_355:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_355:checkpoint_extension">>,<<"false">>}, 
{<<"vb_355:num_items_for_persistence">>,<<"0">>}, {<<"vb_355:num_checkpoints">>,<<"1">>}, {<<"vb_355:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_355:num_checkpoint_items">>,<<"1">>}, {<<"vb_355:num_tap_cursors">>,<<"0">>}, {<<"vb_355:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_355:open_checkpoint_id">>,<<"2">>}, {<<"vb_355:state">>,<<"replica">>}, {<<"vb_354:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_354:checkpoint_extension">>,<<"false">>}, {<<"vb_354:num_items_for_persistence">>,<<"0">>}, {<<"vb_354:num_checkpoints">>,<<"1">>}, {<<"vb_354:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_354:num_checkpoint_items">>,<<"1">>}, {<<"vb_354:num_tap_cursors">>,<<"0">>}, {<<"vb_354:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_354:open_checkpoint_id">>,<<"2">>}, {<<"vb_354:state">>,<<"replica">>}, {<<"vb_353:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_353:checkpoint_extension">>,<<"false">>}, {<<"vb_353:num_items_for_persistence">>,<<"0">>}, {<<"vb_353:num_checkpoints">>,<<"1">>}, {<<"vb_353:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_353:num_checkpoint_items">>,<<"1">>}, {<<"vb_353:num_tap_cursors">>,<<"0">>}, {<<"vb_353:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_353:open_checkpoint_id">>,<<"2">>}, {<<"vb_353:state">>,<<"replica">>}, {<<"vb_352:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_352:checkpoint_extension">>,<<"false">>}, {<<"vb_352:num_items_for_persistence">>,<<"0">>}, {<<"vb_352:num_checkpoints">>,<<"1">>}, {<<"vb_352:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_352:num_checkpoint_items">>,<<"1">>}, {<<"vb_352:num_tap_cursors">>,<<"0">>}, {<<"vb_352:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_352:open_checkpoint_id">>,<<"2">>}, {<<"vb_352:state">>,<<"replica">>}, {<<"vb_351:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_351:checkpoint_extension">>,<<"false">>}, {<<"vb_351:num_items_for_persistence">>,<<"0">>}, {<<"vb_351:num_checkpoints">>,<<"1">>}, {<<"vb_351:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_351:num_checkpoint_items">>,<<"1">>}, {<<"vb_351:num_tap_cursors">>,<<"0">>}, {<<"vb_351:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_351:open_checkpoint_id">>,<<"2">>}, {<<"vb_351:state">>,<<"replica">>}, {<<"vb_350:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_350:checkpoint_extension">>,<<"false">>}, {<<"vb_350:num_items_for_persistence">>,<<"0">>}, {<<"vb_350:num_checkpoints">>,<<"1">>}, {<<"vb_350:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_350:num_checkpoint_items">>,<<"1">>}, {<<"vb_350:num_tap_cursors">>,<<"0">>}, {<<"vb_350:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_350:open_checkpoint_id">>,<<"2">>}, {<<"vb_350:state">>,<<"replica">>}, {<<"vb_349:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_349:checkpoint_extension">>,<<"false">>}, {<<"vb_349:num_items_for_persistence">>,<<"0">>}, {<<"vb_349:num_checkpoints">>,<<"1">>}, {<<"vb_349:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_349:num_checkpoint_items">>,<<"1">>}, {<<"vb_349:num_tap_cursors">>,<<"0">>}, {<<"vb_349:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_349:open_checkpoint_id">>,<<"2">>}, {<<"vb_349:state">>,<<"replica">>}, {<<"vb_348:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_348:checkpoint_extension">>,<<"false">>}, {<<"vb_348:num_items_for_persistence">>,<<"0">>}, {<<"vb_348:num_checkpoints">>,<<"1">>}, {<<"vb_348:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_348:num_checkpoint_items">>,<<"1">>}, {<<"vb_348:num_tap_cursors">>,<<"0">>}, {<<"vb_348:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_348:open_checkpoint_id">>,<<"2">>}, {<<"vb_348:state">>,<<"replica">>}, 
{<<"vb_347:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_347:checkpoint_extension">>,<<"false">>}, {<<"vb_347:num_items_for_persistence">>,<<"0">>}, {<<"vb_347:num_checkpoints">>,<<"1">>}, {<<"vb_347:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_347:num_checkpoint_items">>,<<"1">>}, {<<"vb_347:num_tap_cursors">>,<<"0">>}, {<<"vb_347:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_347:open_checkpoint_id">>,<<"2">>}, {<<"vb_347:state">>,<<"replica">>}, {<<"vb_346:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_346:checkpoint_extension">>,<<"false">>}, {<<"vb_346:num_items_for_persistence">>,<<"0">>}, {<<"vb_346:num_checkpoints">>,<<"1">>}, {<<"vb_346:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_346:num_checkpoint_items">>,<<"1">>}, {<<"vb_346:num_tap_cursors">>,<<"0">>}, {<<"vb_346:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_346:open_checkpoint_id">>,<<"2">>}, {<<"vb_346:state">>,<<"replica">>}, {<<"vb_345:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_345:checkpoint_extension">>,<<"false">>}, {<<"vb_345:num_items_for_persistence">>,<<"0">>}, {<<"vb_345:num_checkpoints">>,<<"1">>}, {<<"vb_345:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_345:num_checkpoint_items">>,<<"1">>}, {<<"vb_345:num_tap_cursors">>,<<"0">>}, {<<"vb_345:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_345:open_checkpoint_id">>,<<"2">>}, {<<"vb_345:state">>,<<"replica">>}, {<<"vb_344:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_344:checkpoint_extension">>,<<"false">>}, {<<"vb_344:num_items_for_persistence">>,<<"0">>}, {<<"vb_344:num_checkpoints">>,<<"1">>}, {<<"vb_344:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_344:num_checkpoint_items">>,<<"1">>}, {<<"vb_344:num_tap_cursors">>,<<"0">>}, {<<"vb_344:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_344:open_checkpoint_id">>,<<"2">>}, {<<"vb_344:state">>,<<"replica">>}, {<<"vb_343:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_343:checkpoint_extension">>,<<"false">>}, {<<"vb_343:num_items_for_persistence">>,<<"0">>}, {<<"vb_343:num_checkpoints">>,<<"1">>}, {<<"vb_343:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_343:num_checkpoint_items">>,<<"1">>}, {<<"vb_343:num_tap_cursors">>,<<"0">>}, {<<"vb_343:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_343:open_checkpoint_id">>,<<"2">>}, {<<"vb_343:state">>,<<"replica">>}, {<<"vb_342:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_342:checkpoint_extension">>,<<"false">>}, {<<"vb_342:num_items_for_persistence">>,<<"0">>}, {<<"vb_342:num_checkpoints">>,<<"1">>}, {<<"vb_342:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_342:num_checkpoint_items">>,<<"1">>}, {<<"vb_342:num_tap_cursors">>,<<"0">>}, {<<"vb_342:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_342:open_checkpoint_id">>,<<"2">>}, {<<"vb_342:state">>,<<"replica">>}, {<<"vb_170:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_170:checkpoint_extension">>,<<"false">>}, {<<"vb_170:num_items_for_persistence">>,<<"0">>}, {<<"vb_170:num_checkpoints">>,<<"1">>}, {<<"vb_170:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_170:num_checkpoint_items">>,<<"1">>}, {<<"vb_170:num_tap_cursors">>,<<"0">>}, {<<"vb_170:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_170:open_checkpoint_id">>,<<"2">>}, {<<"vb_170:state">>,<<"replica">>}, {<<"vb_169:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_169:checkpoint_extension">>,<<"false">>}, {<<"vb_169:num_items_for_persistence">>,<<"0">>}, {<<"vb_169:num_checkpoints">>,<<"1">>}, {<<"vb_169:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_169:num_checkpoint_items">>,<<"1">>}, {<<"vb_169:num_tap_cursors">>,<<"0">>}, {<<"vb_169:last_closed_checkpoint_id">>,<<"1">>}, 
{<<"vb_169:open_checkpoint_id">>,<<"2">>}, {<<"vb_169:state">>,<<"replica">>}, {<<"vb_168:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_168:checkpoint_extension">>,<<"false">>}, {<<"vb_168:num_items_for_persistence">>,<<"0">>}, {<<"vb_168:num_checkpoints">>,<<"1">>}, {<<"vb_168:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_168:num_checkpoint_items">>,<<"1">>}, {<<"vb_168:num_tap_cursors">>,<<"0">>}, {<<"vb_168:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_168:open_checkpoint_id">>,<<"2">>}, {<<"vb_168:state">>,<<"replica">>}, {<<"vb_167:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_167:checkpoint_extension">>,<<"false">>}, {<<"vb_167:num_items_for_persistence">>,<<"0">>}, {<<"vb_167:num_checkpoints">>,<<"1">>}, {<<"vb_167:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_167:num_checkpoint_items">>,<<"1">>}, {<<"vb_167:num_tap_cursors">>,<<"0">>}, {<<"vb_167:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_167:open_checkpoint_id">>,<<"2">>}, {<<"vb_167:state">>,<<"replica">>}, {<<"vb_166:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_166:checkpoint_extension">>,<<"false">>}, {<<"vb_166:num_items_for_persistence">>,<<"0">>}, {<<"vb_166:num_checkpoints">>,<<"1">>}, {<<"vb_166:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_166:num_checkpoint_items">>,<<"1">>}, {<<"vb_166:num_tap_cursors">>,<<"0">>}, {<<"vb_166:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_166:open_checkpoint_id">>,<<"2">>}, {<<"vb_166:state">>,<<"replica">>}, {<<"vb_165:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_165:checkpoint_extension">>,<<"false">>}, {<<"vb_165:num_items_for_persistence">>,<<"0">>}, {<<"vb_165:num_checkpoints">>,<<"1">>}, {<<"vb_165:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_165:num_checkpoint_items">>,<<"1">>}, {<<"vb_165:num_tap_cursors">>,<<"0">>}, {<<"vb_165:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_165:open_checkpoint_id">>,<<"2">>}, {<<"vb_165:state">>,<<"replica">>}, {<<"vb_164:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_164:checkpoint_extension">>,<<"false">>}, {<<"vb_164:num_items_for_persistence">>,<<"0">>}, {<<"vb_164:num_checkpoints">>,<<"1">>}, {<<"vb_164:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_164:num_checkpoint_items">>,<<"1">>}, {<<"vb_164:num_tap_cursors">>,<<"0">>}, {<<"vb_164:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_164:open_checkpoint_id">>,<<"2">>}, {<<"vb_164:state">>,<<"replica">>}, {<<"vb_163:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_163:checkpoint_extension">>,<<"false">>}, {<<"vb_163:num_items_for_persistence">>,<<"0">>}, {<<"vb_163:num_checkpoints">>,<<"1">>}, {<<"vb_163:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_163:num_checkpoint_items">>,<<"1">>}, {<<"vb_163:num_tap_cursors">>,<<"0">>}, {<<"vb_163:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_163:open_checkpoint_id">>,<<"2">>}, {<<"vb_163:state">>,<<"replica">>}, {<<"vb_162:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_162:checkpoint_extension">>,<<"false">>}, {<<"vb_162:num_items_for_persistence">>,<<"0">>}, {<<"vb_162:num_checkpoints">>,<<"1">>}, {<<"vb_162:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_162:num_checkpoint_items">>,<<"1">>}, {<<"vb_162:num_tap_cursors">>,<<"0">>}, {<<"vb_162:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_162:open_checkpoint_id">>,<<"2">>}, {<<"vb_162:state">>,<<"replica">>}, {<<"vb_161:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_161:checkpoint_extension">>,<<"false">>}, {<<"vb_161:num_items_for_persistence">>,<<"0">>}, {<<"vb_161:num_checkpoints">>,<<"1">>}, {<<"vb_161:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_161:num_checkpoint_items">>,<<"1">>}, 
{<<"vb_161:num_tap_cursors">>,<<"0">>}, {<<"vb_161:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_161:open_checkpoint_id">>,<<"2">>}, {<<"vb_161:state">>,<<"replica">>}, {<<"vb_160:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_160:checkpoint_extension">>,<<"false">>}, {<<"vb_160:num_items_for_persistence">>,<<"0">>}, {<<"vb_160:num_checkpoints">>,<<"1">>}, {<<"vb_160:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_160:num_checkpoint_items">>,<<"1">>}, {<<"vb_160:num_tap_cursors">>,<<"0">>}, {<<"vb_160:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_160:open_checkpoint_id">>,<<"2">>}, {<<"vb_160:state">>,<<"replica">>}, {<<"vb_159:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_159:checkpoint_extension">>,<<"false">>}, {<<"vb_159:num_items_for_persistence">>,<<"0">>}, {<<"vb_159:num_checkpoints">>,<<"1">>}, {<<"vb_159:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_159:num_checkpoint_items">>,<<"1">>}, {<<"vb_159:num_tap_cursors">>,<<"0">>}, {<<"vb_159:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_159:open_checkpoint_id">>,<<"2">>}, {<<"vb_159:state">>,<<"replica">>}, {<<"vb_158:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_158:checkpoint_extension">>,<<"false">>}, {<<"vb_158:num_items_for_persistence">>,<<"0">>}, {<<"vb_158:num_checkpoints">>,<<"1">>}, {<<"vb_158:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_158:num_checkpoint_items">>,<<"1">>}, {<<"vb_158:num_tap_cursors">>,<<"0">>}, {<<"vb_158:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_158:open_checkpoint_id">>,<<"2">>}, {<<"vb_158:state">>,<<"replica">>}, {<<"vb_157:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_157:checkpoint_extension">>,<<"false">>}, {<<"vb_157:num_items_for_persistence">>,<<"0">>}, {<<"vb_157:num_checkpoints">>,<<"1">>}, {<<"vb_157:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_157:num_checkpoint_items">>,<<"1">>}, {<<"vb_157:num_tap_cursors">>,<<"0">>}, {<<"vb_157:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_157:open_checkpoint_id">>,<<"2">>}, {<<"vb_157:state">>,<<"replica">>}, {<<"vb_156:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_156:checkpoint_extension">>,<<"false">>}, {<<"vb_156:num_items_for_persistence">>,<<"0">>}, {<<"vb_156:num_checkpoints">>,<<"1">>}, {<<"vb_156:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_156:num_checkpoint_items">>,<<"1">>}, {<<"vb_156:num_tap_cursors">>,<<"0">>}, {<<"vb_156:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_156:open_checkpoint_id">>,<<"2">>}, {<<"vb_156:state">>,<<"replica">>}, {<<"vb_155:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_155:checkpoint_extension">>,<<"false">>}, {<<"vb_155:num_items_for_persistence">>,<<"0">>}, {<<"vb_155:num_checkpoints">>,<<"1">>}, {<<"vb_155:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_155:num_checkpoint_items">>,<<"1">>}, {<<"vb_155:num_tap_cursors">>,<<"0">>}, {<<"vb_155:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_155:open_checkpoint_id">>,<<"2">>}, {<<"vb_155:state">>,<<"replica">>}, {<<"vb_154:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_154:checkpoint_extension">>,<<"false">>}, {<<"vb_154:num_items_for_persistence">>,<<"0">>}, {<<"vb_154:num_checkpoints">>,<<"1">>}, {<<"vb_154:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_154:num_checkpoint_items">>,<<"1">>}, {<<"vb_154:num_tap_cursors">>,<<"0">>}, {<<"vb_154:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_154:open_checkpoint_id">>,<<"2">>}, {<<"vb_154:state">>,<<"replica">>}, {<<"vb_153:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_153:checkpoint_extension">>,<<"false">>}, {<<"vb_153:num_items_for_persistence">>,<<"0">>}, {<<"vb_153:num_checkpoints">>,<<"1">>}, 
{<<"vb_153:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_153:num_checkpoint_items">>,<<"1">>}, {<<"vb_153:num_tap_cursors">>,<<"0">>}, {<<"vb_153:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_153:open_checkpoint_id">>,<<"2">>}, {<<"vb_153:state">>,<<"replica">>}, {<<"vb_152:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_152:checkpoint_extension">>,<<"false">>}, {<<"vb_152:num_items_for_persistence">>,<<"0">>}, {<<"vb_152:num_checkpoints">>,<<"1">>}, {<<"vb_152:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_152:num_checkpoint_items">>,<<"1">>}, {<<"vb_152:num_tap_cursors">>,<<"0">>}, {<<"vb_152:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_152:open_checkpoint_id">>,<<"2">>}, {<<"vb_152:state">>,<<"replica">>}, {<<"vb_151:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_151:checkpoint_extension">>,<<"false">>}, {<<"vb_151:num_items_for_persistence">>,<<"0">>}, {<<"vb_151:num_checkpoints">>,<<"1">>}, {<<"vb_151:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_151:num_checkpoint_items">>,<<"1">>}, {<<"vb_151:num_tap_cursors">>,<<"0">>}, {<<"vb_151:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_151:open_checkpoint_id">>,<<"2">>}, {<<"vb_151:state">>,<<"replica">>}, {<<"vb_150:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_150:checkpoint_extension">>,<<"false">>}, {<<"vb_150:num_items_for_persistence">>,<<"0">>}, {<<"vb_150:num_checkpoints">>,<<"1">>}, {<<"vb_150:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_150:num_checkpoint_items">>,<<"1">>}, {<<"vb_150:num_tap_cursors">>,<<"0">>}, {<<"vb_150:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_150:open_checkpoint_id">>,<<"2">>}, {<<"vb_150:state">>,<<"replica">>}, {<<"vb_149:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_149:checkpoint_extension">>,<<"false">>}, {<<"vb_149:num_items_for_persistence">>,<<"0">>}, {<<"vb_149:num_checkpoints">>,<<"1">>}, {<<"vb_149:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_149:num_checkpoint_items">>,<<"1">>}, {<<"vb_149:num_tap_cursors">>,<<"0">>}, {<<"vb_149:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_149:open_checkpoint_id">>,<<"2">>}, {<<"vb_149:state">>,<<"replica">>}, {<<"vb_148:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_148:checkpoint_extension">>,<<"false">>}, {<<"vb_148:num_items_for_persistence">>,<<"0">>}, {<<"vb_148:num_checkpoints">>,<<"1">>}, {<<"vb_148:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_148:num_checkpoint_items">>,<<"1">>}, {<<"vb_148:num_tap_cursors">>,<<"0">>}, {<<"vb_148:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_148:open_checkpoint_id">>,<<"2">>}, {<<"vb_148:state">>,<<"replica">>}, {<<"vb_147:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_147:checkpoint_extension">>,<<"false">>}, {<<"vb_147:num_items_for_persistence">>,<<"0">>}, {<<"vb_147:num_checkpoints">>,<<"1">>}, {<<"vb_147:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_147:num_checkpoint_items">>,<<"1">>}, {<<"vb_147:num_tap_cursors">>,<<"0">>}, {<<"vb_147:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_147:open_checkpoint_id">>,<<"2">>}, {<<"vb_147:state">>,<<"replica">>}, {<<"vb_146:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_146:checkpoint_extension">>,<<"false">>}, {<<"vb_146:num_items_for_persistence">>,<<"0">>}, {<<"vb_146:num_checkpoints">>,<<"1">>}, {<<"vb_146:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_146:num_checkpoint_items">>,<<"1">>}, {<<"vb_146:num_tap_cursors">>,<<"0">>}, {<<"vb_146:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_146:open_checkpoint_id">>,<<"2">>}, {<<"vb_146:state">>,<<"replica">>}, {<<"vb_145:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_145:checkpoint_extension">>,<<"false">>}, 
{<<"vb_145:num_items_for_persistence">>,<<"0">>}, {<<"vb_145:num_checkpoints">>,<<"1">>}, {<<"vb_145:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_145:num_checkpoint_items">>,<<"1">>}, {<<"vb_145:num_tap_cursors">>,<<"0">>}, {<<"vb_145:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_145:open_checkpoint_id">>,<<"2">>}, {<<"vb_145:state">>,<<"replica">>}, {<<"vb_144:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_144:checkpoint_extension">>,<<"false">>}, {<<"vb_144:num_items_for_persistence">>,<<"0">>}, {<<"vb_144:num_checkpoints">>,<<"1">>}, {<<"vb_144:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_144:num_checkpoint_items">>,<<"1">>}, {<<"vb_144:num_tap_cursors">>,<<"0">>}, {<<"vb_144:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_144:open_checkpoint_id">>,<<"2">>}, {<<"vb_144:state">>,<<"replica">>}, {<<"vb_143:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_143:checkpoint_extension">>,<<"false">>}, {<<"vb_143:num_items_for_persistence">>,<<"0">>}, {<<"vb_143:num_checkpoints">>,<<"1">>}, {<<"vb_143:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_143:num_checkpoint_items">>,<<"1">>}, {<<"vb_143:num_tap_cursors">>,<<"0">>}, {<<"vb_143:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_143:open_checkpoint_id">>,<<"2">>}, {<<"vb_143:state">>,<<"replica">>}, {<<"vb_142:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_142:checkpoint_extension">>,<<"false">>}, {<<"vb_142:num_items_for_persistence">>,<<"0">>}, {<<"vb_142:num_checkpoints">>,<<"1">>}, {<<"vb_142:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_142:num_checkpoint_items">>,<<"1">>}, {<<"vb_142:num_tap_cursors">>,<<"0">>}, {<<"vb_142:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_142:open_checkpoint_id">>,<<"2">>}, {<<"vb_142:state">>,<<"replica">>}, {<<"vb_141:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_141:checkpoint_extension">>,<<"false">>}, {<<"vb_141:num_items_for_persistence">>,<<"0">>}, {<<"vb_141:num_checkpoints">>,<<"1">>}, {<<"vb_141:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_141:num_checkpoint_items">>,<<"1">>}, {<<"vb_141:num_tap_cursors">>,<<"0">>}, {<<"vb_141:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_141:open_checkpoint_id">>,<<"2">>}, {<<"vb_141:state">>,<<"replica">>}, {<<"vb_140:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_140:checkpoint_extension">>,<<"false">>}, {<<"vb_140:num_items_for_persistence">>,<<"0">>}, {<<"vb_140:num_checkpoints">>,<<"1">>}, {<<"vb_140:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_140:num_checkpoint_items">>,<<"1">>}, {<<"vb_140:num_tap_cursors">>,<<"0">>}, {<<"vb_140:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_140:open_checkpoint_id">>,<<"2">>}, {<<"vb_140:state">>,<<"replica">>}, {<<"vb_139:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_139:checkpoint_extension">>,<<"false">>}, {<<"vb_139:num_items_for_persistence">>,<<"0">>}, {<<"vb_139:num_checkpoints">>,<<"1">>}, {<<"vb_139:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_139:num_checkpoint_items">>,<<"1">>}, {<<"vb_139:num_tap_cursors">>,<<"0">>}, {<<"vb_139:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_139:open_checkpoint_id">>,<<"2">>}, {<<"vb_139:state">>,<<"replica">>}, {<<"vb_138:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_138:checkpoint_extension">>,<<"false">>}, {<<"vb_138:num_items_for_persistence">>,<<"0">>}, {<<"vb_138:num_checkpoints">>,<<"1">>}, {<<"vb_138:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_138:num_checkpoint_items">>,<<"1">>}, {<<"vb_138:num_tap_cursors">>,<<"0">>}, {<<"vb_138:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_138:open_checkpoint_id">>,<<"2">>}, {<<"vb_138:state">>,<<"replica">>}, 
{<<"vb_137:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_137:checkpoint_extension">>,<<"false">>}, {<<"vb_137:num_items_for_persistence">>,<<"0">>}, {<<"vb_137:num_checkpoints">>,<<"1">>}, {<<"vb_137:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_137:num_checkpoint_items">>,<<"1">>}, {<<"vb_137:num_tap_cursors">>,<<"0">>}, {<<"vb_137:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_137:open_checkpoint_id">>,<<"2">>}, {<<"vb_137:state">>,<<"replica">>}, {<<"vb_136:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_136:checkpoint_extension">>,<<"false">>}, {<<"vb_136:num_items_for_persistence">>,<<"0">>}, {<<"vb_136:num_checkpoints">>,<<"1">>}, {<<"vb_136:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_136:num_checkpoint_items">>,<<"1">>}, {<<"vb_136:num_tap_cursors">>,<<"0">>}, {<<"vb_136:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_136:open_checkpoint_id">>,<<"2">>}, {<<"vb_136:state">>,<<"replica">>}, {<<"vb_135:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_135:checkpoint_extension">>,<<"false">>}, {<<"vb_135:num_items_for_persistence">>,<<"0">>}, {<<"vb_135:num_checkpoints">>,<<"1">>}, {<<"vb_135:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_135:num_checkpoint_items">>,<<"1">>}, {<<"vb_135:num_tap_cursors">>,<<"0">>}, {<<"vb_135:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_135:open_checkpoint_id">>,<<"2">>}, {<<"vb_135:state">>,<<"replica">>}, {<<"vb_134:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_134:checkpoint_extension">>,<<"false">>}, {<<"vb_134:num_items_for_persistence">>,<<"0">>}, {<<"vb_134:num_checkpoints">>,<<"1">>}, {<<"vb_134:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_134:num_checkpoint_items">>,<<"1">>}, {<<"vb_134:num_tap_cursors">>,<<"0">>}, {<<"vb_134:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_134:open_checkpoint_id">>,<<"2">>}, {<<"vb_134:state">>,<<"replica">>}, {<<"vb_133:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_133:checkpoint_extension">>,<<"false">>}, {<<"vb_133:num_items_for_persistence">>,<<"0">>}, {<<"vb_133:num_checkpoints">>,<<"1">>}, {<<"vb_133:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_133:num_checkpoint_items">>,<<"1">>}, {<<"vb_133:num_tap_cursors">>,<<"0">>}, {<<"vb_133:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_133:open_checkpoint_id">>,<<"2">>}, {<<"vb_133:state">>,<<"replica">>}, {<<"vb_132:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_132:checkpoint_extension">>,<<"false">>}, {<<"vb_132:num_items_for_persistence">>,<<"0">>}, {<<"vb_132:num_checkpoints">>,<<"1">>}, {<<"vb_132:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_132:num_checkpoint_items">>,<<"1">>}, {<<"vb_132:num_tap_cursors">>,<<"0">>}, {<<"vb_132:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_132:open_checkpoint_id">>,<<"2">>}, {<<"vb_132:state">>,<<"replica">>}, {<<"vb_131:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_131:checkpoint_extension">>,<<"false">>}, {<<"vb_131:num_items_for_persistence">>,<<"0">>}, {<<"vb_131:num_checkpoints">>,<<"1">>}, {<<"vb_131:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_131:num_checkpoint_items">>,<<"1">>}, {<<"vb_131:num_tap_cursors">>,<<"0">>}, {<<"vb_131:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_131:open_checkpoint_id">>,<<"2">>}, {<<"vb_131:state">>,<<"replica">>}, {<<"vb_130:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_130:checkpoint_extension">>,<<"false">>}, {<<"vb_130:num_items_for_persistence">>,<<"0">>}, {<<"vb_130:num_checkpoints">>,<<"1">>}, {<<"vb_130:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_130:num_checkpoint_items">>,<<"1">>}, {<<"vb_130:num_tap_cursors">>,<<"0">>}, {<<"vb_130:last_closed_checkpoint_id">>,<<"1">>}, 
{<<"vb_130:open_checkpoint_id">>,<<"2">>}, {<<"vb_130:state">>,<<"replica">>}, {<<"vb_129:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_129:checkpoint_extension">>,<<"false">>}, {<<"vb_129:num_items_for_persistence">>,<<"0">>}, {<<"vb_129:num_checkpoints">>,<<"1">>}, {<<"vb_129:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_129:num_checkpoint_items">>,<<"1">>}, {<<"vb_129:num_tap_cursors">>,<<"0">>}, {<<"vb_129:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_129:open_checkpoint_id">>,<<"2">>}, {<<"vb_129:state">>,<<"replica">>}, {<<"vb_128:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_128:checkpoint_extension">>,<<"false">>}, {<<"vb_128:num_items_for_persistence">>,<<"0">>}, {<<"vb_128:num_checkpoints">>,<<"1">>}, {<<"vb_128:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_128:num_checkpoint_items">>,<<"1">>}, {<<"vb_128:num_tap_cursors">>,<<"0">>}, {<<"vb_128:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_128:open_checkpoint_id">>,<<"2">>}, {<<"vb_128:state">>,<<"replica">>}, {<<"vb_127:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_127:checkpoint_extension">>,<<"false">>}, {<<"vb_127:num_items_for_persistence">>,<<"0">>}, {<<"vb_127:num_checkpoints">>,<<"1">>}, {<<"vb_127:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_127:num_checkpoint_items">>,<<"1">>}, {<<"vb_127:num_tap_cursors">>,<<"0">>}, {<<"vb_127:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_127:open_checkpoint_id">>,<<"2">>}, {<<"vb_127:state">>,<<"replica">>}, {<<"vb_126:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_126:checkpoint_extension">>,<<"false">>}, {<<"vb_126:num_items_for_persistence">>,<<"0">>}, {<<"vb_126:num_checkpoints">>,<<"1">>}, {<<"vb_126:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_126:num_checkpoint_items">>,<<"1">>}, {<<"vb_126:num_tap_cursors">>,<<"0">>}, {<<"vb_126:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_126:open_checkpoint_id">>,<<"2">>}, {<<"vb_126:state">>,<<"replica">>}, {<<"vb_125:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_125:checkpoint_extension">>,<<"false">>}, {<<"vb_125:num_items_for_persistence">>,<<"0">>}, {<<"vb_125:num_checkpoints">>,<<"1">>}, {<<"vb_125:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_125:num_checkpoint_items">>,<<"1">>}, {<<"vb_125:num_tap_cursors">>,<<"0">>}, {<<"vb_125:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_125:open_checkpoint_id">>,<<"2">>}, {<<"vb_125:state">>,<<"replica">>}, {<<"vb_124:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_124:checkpoint_extension">>,<<"false">>}, {<<"vb_124:num_items_for_persistence">>,<<"0">>}, {<<"vb_124:num_checkpoints">>,<<"1">>}, {<<"vb_124:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_124:num_checkpoint_items">>,<<"1">>}, {<<"vb_124:num_tap_cursors">>,<<"0">>}, {<<"vb_124:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_124:open_checkpoint_id">>,<<"2">>}, {<<"vb_124:state">>,<<"replica">>}, {<<"vb_123:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_123:checkpoint_extension">>,<<"false">>}, {<<"vb_123:num_items_for_persistence">>,<<"0">>}, {<<"vb_123:num_checkpoints">>,<<"1">>}, {<<"vb_123:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_123:num_checkpoint_items">>,<<"1">>}, {<<"vb_123:num_tap_cursors">>,<<"0">>}, {<<"vb_123:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_123:open_checkpoint_id">>,<<"2">>}, {<<"vb_123:state">>,<<"replica">>}, {<<"vb_122:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_122:checkpoint_extension">>,<<"false">>}, {<<"vb_122:num_items_for_persistence">>,<<"0">>}, {<<"vb_122:num_checkpoints">>,<<"1">>}, {<<"vb_122:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_122:num_checkpoint_items">>,<<"1">>}, 
{<<"vb_122:num_tap_cursors">>,<<"0">>}, {<<"vb_122:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_122:open_checkpoint_id">>,<<"2">>}, {<<"vb_122:state">>,<<"replica">>}, {<<"vb_121:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_121:checkpoint_extension">>,<<"false">>}, {<<"vb_121:num_items_for_persistence">>,<<"0">>}, {<<"vb_121:num_checkpoints">>,<<"1">>}, {<<"vb_121:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_121:num_checkpoint_items">>,<<"1">>}, {<<"vb_121:num_tap_cursors">>,<<"0">>}, {<<"vb_121:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_121:open_checkpoint_id">>,<<"2">>}, {<<"vb_121:state">>,<<"replica">>}, {<<"vb_120:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_120:checkpoint_extension">>,<<"false">>}, {<<"vb_120:num_items_for_persistence">>,<<"0">>}, {<<"vb_120:num_checkpoints">>,<<"1">>}, {<<"vb_120:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_120:num_checkpoint_items">>,<<"1">>}, {<<"vb_120:num_tap_cursors">>,<<"0">>}, {<<"vb_120:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_120:open_checkpoint_id">>,<<"2">>}, {<<"vb_120:state">>,<<"replica">>}, {<<"vb_119:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_119:checkpoint_extension">>,<<"false">>}, {<<"vb_119:num_items_for_persistence">>,<<"0">>}, {<<"vb_119:num_checkpoints">>,<<"1">>}, {<<"vb_119:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_119:num_checkpoint_items">>,<<"1">>}, {<<"vb_119:num_tap_cursors">>,<<"0">>}, {<<"vb_119:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_119:open_checkpoint_id">>,<<"2">>}, {<<"vb_119:state">>,<<"replica">>}, {<<"vb_118:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_118:checkpoint_extension">>,<<"false">>}, {<<"vb_118:num_items_for_persistence">>,<<"0">>}, {<<"vb_118:num_checkpoints">>,<<"1">>}, {<<"vb_118:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_118:num_checkpoint_items">>,<<"1">>}, {<<"vb_118:num_tap_cursors">>,<<"0">>}, {<<"vb_118:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_118:open_checkpoint_id">>,<<"2">>}, {<<"vb_118:state">>,<<"replica">>}, {<<"vb_117:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_117:checkpoint_extension">>,<<"false">>}, {<<"vb_117:num_items_for_persistence">>,<<"0">>}, {<<"vb_117:num_checkpoints">>,<<"1">>}, {<<"vb_117:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_117:num_checkpoint_items">>,<<"1">>}, {<<"vb_117:num_tap_cursors">>,<<"0">>}, {<<"vb_117:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_117:open_checkpoint_id">>,<<"2">>}, {<<"vb_117:state">>,<<"replica">>}, {<<"vb_116:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_116:checkpoint_extension">>,<<"false">>}, {<<"vb_116:num_items_for_persistence">>,<<"0">>}, {<<"vb_116:num_checkpoints">>,<<"1">>}, {<<"vb_116:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_116:num_checkpoint_items">>,<<"1">>}, {<<"vb_116:num_tap_cursors">>,<<"0">>}, {<<"vb_116:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_116:open_checkpoint_id">>,<<"2">>}, {<<"vb_116:state">>,<<"replica">>}, {<<"vb_115:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_115:checkpoint_extension">>,<<"false">>}, {<<"vb_115:num_items_for_persistence">>,<<"0">>}, {<<"vb_115:num_checkpoints">>,<<"1">>}, {<<"vb_115:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_115:num_checkpoint_items">>,<<"1">>}, {<<"vb_115:num_tap_cursors">>,<<"0">>}, {<<"vb_115:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_115:open_checkpoint_id">>,<<"2">>}, {<<"vb_115:state">>,<<"replica">>}, {<<"vb_114:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_114:checkpoint_extension">>,<<"false">>}, {<<"vb_114:num_items_for_persistence">>,<<"0">>}, {<<"vb_114:num_checkpoints">>,<<"1">>}, 
{<<"vb_114:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_114:num_checkpoint_items">>,<<"1">>}, {<<"vb_114:num_tap_cursors">>,<<"0">>}, {<<"vb_114:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_114:open_checkpoint_id">>,<<"2">>}, {<<"vb_114:state">>,<<"replica">>}, {<<"vb_113:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_113:checkpoint_extension">>,<<"false">>}, {<<"vb_113:num_items_for_persistence">>,<<"0">>}, {<<"vb_113:num_checkpoints">>,<<"1">>}, {<<"vb_113:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_113:num_checkpoint_items">>,<<"1">>}, {<<"vb_113:num_tap_cursors">>,<<"0">>}, {<<"vb_113:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_113:open_checkpoint_id">>,<<"2">>}, {<<"vb_113:state">>,<<"replica">>}, {<<"vb_112:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_112:checkpoint_extension">>,<<"false">>}, {<<"vb_112:num_items_for_persistence">>,<<"0">>}, {<<"vb_112:num_checkpoints">>,<<"1">>}, {<<"vb_112:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_112:num_checkpoint_items">>,<<"1">>}, {<<"vb_112:num_tap_cursors">>,<<"0">>}, {<<"vb_112:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_112:open_checkpoint_id">>,<<"2">>}, {<<"vb_112:state">>,<<"replica">>}, {<<"vb_111:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_111:checkpoint_extension">>,<<"false">>}, {<<"vb_111:num_items_for_persistence">>,<<"0">>}, {<<"vb_111:num_checkpoints">>,<<"1">>}, {<<"vb_111:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_111:num_checkpoint_items">>,<<"1">>}, {<<"vb_111:num_tap_cursors">>,<<"0">>}, {<<"vb_111:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_111:open_checkpoint_id">>,<<"2">>}, {<<"vb_111:state">>,<<"replica">>}, {<<"vb_110:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_110:checkpoint_extension">>,<<"false">>}, {<<"vb_110:num_items_for_persistence">>,<<"0">>}, {<<"vb_110:num_checkpoints">>,<<"1">>}, {<<"vb_110:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_110:num_checkpoint_items">>,<<"1">>}, {<<"vb_110:num_tap_cursors">>,<<"0">>}, {<<"vb_110:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_110:open_checkpoint_id">>,<<"2">>}, {<<"vb_110:state">>,<<"replica">>}, {<<"vb_109:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_109:checkpoint_extension">>,<<"false">>}, {<<"vb_109:num_items_for_persistence">>,<<"0">>}, {<<"vb_109:num_checkpoints">>,<<"1">>}, {<<"vb_109:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_109:num_checkpoint_items">>,<<"1">>}, {<<"vb_109:num_tap_cursors">>,<<"0">>}, {<<"vb_109:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_109:open_checkpoint_id">>,<<"2">>}, {<<"vb_109:state">>,<<"replica">>}, {<<"vb_108:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_108:checkpoint_extension">>,<<"false">>}, {<<"vb_108:num_items_for_persistence">>,<<"0">>}, {<<"vb_108:num_checkpoints">>,<<"1">>}, {<<"vb_108:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_108:num_checkpoint_items">>,<<"1">>}, {<<"vb_108:num_tap_cursors">>,<<"0">>}, {<<"vb_108:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_108:open_checkpoint_id">>,<<"2">>}, {<<"vb_108:state">>,<<"replica">>}, {<<"vb_107:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_107:checkpoint_extension">>,<<"false">>}, {<<"vb_107:num_items_for_persistence">>,<<"0">>}, {<<"vb_107:num_checkpoints">>,<<"1">>}, {<<"vb_107:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_107:num_checkpoint_items">>,<<"1">>}, {<<"vb_107:num_tap_cursors">>,<<"0">>}, {<<"vb_107:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_107:open_checkpoint_id">>,<<"2">>}, {<<"vb_107:state">>,<<"replica">>}, {<<"vb_106:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_106:checkpoint_extension">>,<<"false">>}, 
{<<"vb_106:num_items_for_persistence">>,<<"0">>}, {<<"vb_106:num_checkpoints">>,<<"1">>}, {<<"vb_106:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_106:num_checkpoint_items">>,<<"1">>}, {<<"vb_106:num_tap_cursors">>,<<"0">>}, {<<"vb_106:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_106:open_checkpoint_id">>,<<"2">>}, {<<"vb_106:state">>,<<"replica">>}, {<<"vb_105:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_105:checkpoint_extension">>,<<"false">>}, {<<"vb_105:num_items_for_persistence">>,<<"0">>}, {<<"vb_105:num_checkpoints">>,<<"1">>}, {<<"vb_105:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_105:num_checkpoint_items">>,<<"1">>}, {<<"vb_105:num_tap_cursors">>,<<"0">>}, {<<"vb_105:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_105:open_checkpoint_id">>,<<"2">>}, {<<"vb_105:state">>,<<"replica">>}, {<<"vb_104:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_104:checkpoint_extension">>,<<"false">>}, {<<"vb_104:num_items_for_persistence">>,<<"0">>}, {<<"vb_104:num_checkpoints">>,<<"1">>}, {<<"vb_104:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_104:num_checkpoint_items">>,<<"1">>}, {<<"vb_104:num_tap_cursors">>,<<"0">>}, {<<"vb_104:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_104:open_checkpoint_id">>,<<"2">>}, {<<"vb_104:state">>,<<"replica">>}, {<<"vb_103:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_103:checkpoint_extension">>,<<"false">>}, {<<"vb_103:num_items_for_persistence">>,<<"0">>}, {<<"vb_103:num_checkpoints">>,<<"1">>}, {<<"vb_103:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_103:num_checkpoint_items">>,<<"1">>}, {<<"vb_103:num_tap_cursors">>,<<"0">>}, {<<"vb_103:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_103:open_checkpoint_id">>,<<"2">>}, {<<"vb_103:state">>,<<"replica">>}, {<<"vb_102:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_102:checkpoint_extension">>,<<"false">>}, {<<"vb_102:num_items_for_persistence">>,<<"0">>}, {<<"vb_102:num_checkpoints">>,<<"1">>}, {<<"vb_102:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_102:num_checkpoint_items">>,<<"1">>}, {<<"vb_102:num_tap_cursors">>,<<"0">>}, {<<"vb_102:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_102:open_checkpoint_id">>,<<"2">>}, {<<"vb_102:state">>,<<"replica">>}, {<<"vb_101:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_101:checkpoint_extension">>,<<"false">>}, {<<"vb_101:num_items_for_persistence">>,<<"0">>}, {<<"vb_101:num_checkpoints">>,<<"1">>}, {<<"vb_101:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_101:num_checkpoint_items">>,<<"1">>}, {<<"vb_101:num_tap_cursors">>,<<"0">>}, {<<"vb_101:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_101:open_checkpoint_id">>,<<"2">>}, {<<"vb_101:state">>,<<"replica">>}, {<<"vb_100:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_100:checkpoint_extension">>,<<"false">>}, {<<"vb_100:num_items_for_persistence">>,<<"0">>}, {<<"vb_100:num_checkpoints">>,<<"1">>}, {<<"vb_100:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_100:num_checkpoint_items">>,<<"1">>}, {<<"vb_100:num_tap_cursors">>,<<"0">>}, {<<"vb_100:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_100:open_checkpoint_id">>,<<"2">>}, {<<"vb_100:state">>,<<"replica">>}, {<<"vb_99:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_99:checkpoint_extension">>,<<"false">>}, {<<"vb_99:num_items_for_persistence">>,<<"0">>}, {<<"vb_99:num_checkpoints">>,<<"1">>}, {<<"vb_99:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_99:num_checkpoint_items">>,<<"1">>}, {<<"vb_99:num_tap_cursors">>,<<"0">>}, {<<"vb_99:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_99:open_checkpoint_id">>,<<"2">>}, {<<"vb_99:state">>,<<"replica">>}, {<<"vb_98:persisted_checkpoint_id">>,<<"1">>}, 
{<<"vb_98:checkpoint_extension">>,<<"false">>}, {<<"vb_98:num_items_for_persistence">>,<<"0">>}, {<<"vb_98:num_checkpoints">>,<<"1">>}, {<<"vb_98:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_98:num_checkpoint_items">>,<<"1">>}, {<<"vb_98:num_tap_cursors">>,<<"0">>}, {<<"vb_98:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_98:open_checkpoint_id">>,<<"2">>}, {<<"vb_98:state">>,<<"replica">>}, {<<"vb_97:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_97:checkpoint_extension">>,<<"false">>}, {<<"vb_97:num_items_for_persistence">>,<<"0">>}, {<<"vb_97:num_checkpoints">>,<<"1">>}, {<<"vb_97:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_97:num_checkpoint_items">>,<<"1">>}, {<<"vb_97:num_tap_cursors">>,<<"0">>}, {<<"vb_97:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_97:open_checkpoint_id">>,<<"2">>}, {<<"vb_97:state">>,<<"replica">>}, {<<"vb_96:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_96:checkpoint_extension">>,<<"false">>}, {<<"vb_96:num_items_for_persistence">>,<<"0">>}, {<<"vb_96:num_checkpoints">>,<<"1">>}, {<<"vb_96:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_96:num_checkpoint_items">>,<<"1">>}, {<<"vb_96:num_tap_cursors">>,<<"0">>}, {<<"vb_96:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_96:open_checkpoint_id">>,<<"2">>}, {<<"vb_96:state">>,<<"replica">>}, {<<"vb_95:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_95:checkpoint_extension">>,<<"false">>}, {<<"vb_95:num_items_for_persistence">>,<<"0">>}, {<<"vb_95:num_checkpoints">>,<<"1">>}, {<<"vb_95:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_95:num_checkpoint_items">>,<<"1">>}, {<<"vb_95:num_tap_cursors">>,<<"0">>}, {<<"vb_95:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_95:open_checkpoint_id">>,<<"2">>}, {<<"vb_95:state">>,<<"replica">>}, {<<"vb_94:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_94:checkpoint_extension">>,<<"false">>}, {<<"vb_94:num_items_for_persistence">>,<<"0">>}, {<<"vb_94:num_checkpoints">>,<<"1">>}, {<<"vb_94:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_94:num_checkpoint_items">>,<<"1">>}, {<<"vb_94:num_tap_cursors">>,<<"0">>}, {<<"vb_94:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_94:open_checkpoint_id">>,<<"2">>}, {<<"vb_94:state">>,<<"replica">>}, {<<"vb_93:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_93:checkpoint_extension">>,<<"false">>}, {<<"vb_93:num_items_for_persistence">>,<<"0">>}, {<<"vb_93:num_checkpoints">>,<<"1">>}, {<<"vb_93:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_93:num_checkpoint_items">>,<<"1">>}, {<<"vb_93:num_tap_cursors">>,<<"0">>}, {<<"vb_93:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_93:open_checkpoint_id">>,<<"2">>}, {<<"vb_93:state">>,<<"replica">>}, {<<"vb_92:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_92:checkpoint_extension">>,<<"false">>}, {<<"vb_92:num_items_for_persistence">>,<<"0">>}, {<<"vb_92:num_checkpoints">>,<<"1">>}, {<<"vb_92:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_92:num_checkpoint_items">>,<<"1">>}, {<<"vb_92:num_tap_cursors">>,<<"0">>}, {<<"vb_92:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_92:open_checkpoint_id">>,<<"2">>}, {<<"vb_92:state">>,<<"replica">>}, {<<"vb_91:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_91:checkpoint_extension">>,<<"false">>}, {<<"vb_91:num_items_for_persistence">>,<<"0">>}, {<<"vb_91:num_checkpoints">>,<<"1">>}, {<<"vb_91:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_91:num_checkpoint_items">>,<<"1">>}, {<<"vb_91:num_tap_cursors">>,<<"0">>}, {<<"vb_91:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_91:open_checkpoint_id">>,<<"2">>}, {<<"vb_91:state">>,<<"replica">>}, {<<"vb_90:persisted_checkpoint_id">>,<<"1">>}, 
{<<"vb_90:checkpoint_extension">>,<<"false">>}, {<<"vb_90:num_items_for_persistence">>,<<"0">>}, {<<"vb_90:num_checkpoints">>,<<"1">>}, {<<"vb_90:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_90:num_checkpoint_items">>,<<"1">>}, {<<"vb_90:num_tap_cursors">>,<<"0">>}, {<<"vb_90:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_90:open_checkpoint_id">>,<<"2">>}, {<<"vb_90:state">>,<<"replica">>}, {<<"vb_89:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_89:checkpoint_extension">>,<<"false">>}, {<<"vb_89:num_items_for_persistence">>,<<"0">>}, {<<"vb_89:num_checkpoints">>,<<"1">>}, {<<"vb_89:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_89:num_checkpoint_items">>,<<"1">>}, {<<"vb_89:num_tap_cursors">>,<<"0">>}, {<<"vb_89:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_89:open_checkpoint_id">>,<<"2">>}, {<<"vb_89:state">>,<<"replica">>}, {<<"vb_88:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_88:checkpoint_extension">>,<<"false">>}, {<<"vb_88:num_items_for_persistence">>,<<"0">>}, {<<"vb_88:num_checkpoints">>,<<"1">>}, {<<"vb_88:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_88:num_checkpoint_items">>,<<"1">>}, {<<"vb_88:num_tap_cursors">>,<<"0">>}, {<<"vb_88:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_88:open_checkpoint_id">>,<<"2">>}, {<<"vb_88:state">>,<<"replica">>}, {<<"vb_87:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_87:checkpoint_extension">>,<<"false">>}, {<<"vb_87:num_items_for_persistence">>,<<"0">>}, {<<"vb_87:num_checkpoints">>,<<"1">>}, {<<"vb_87:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_87:num_checkpoint_items">>,<<"1">>}, {<<"vb_87:num_tap_cursors">>,<<"0">>}, {<<"vb_87:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_87:open_checkpoint_id">>,<<"2">>}, {<<"vb_87:state">>,<<"replica">>}, {<<"vb_86:persisted_checkpoint_id">>,<<"1">>}, {<<"vb_86:checkpoint_extension">>,<<"false">>}, {<<"vb_86:num_items_for_persistence">>,<<"0">>}, {<<"vb_86:num_checkpoints">>,<<"1">>}, {<<"vb_86:num_open_checkpoint_items">>,<<"0">>}, {<<"vb_86:num_checkpoint_items">>,<<"1">>}, {<<"vb_86:num_tap_cursors">>,<<"0">>}, {<<"vb_86:last_closed_checkpoint_id">>,<<"1">>}, {<<"vb_86:open_checkpoint_id">>,<<"2">>}, {<<"vb_86:state">>,<<"replica">>}] [ns_server:info,2014-08-19T16:52:39.499,ns_1@10.242.238.90:<0.1917.1>:diag_handler:log_all_tap_and_checkpoint_stats:132]end of logging tap & checkpoint stats [ns_server:debug,2014-08-19T16:52:42.007,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2014-08-19T16:52:42.009,ns_1@10.242.238.90:<0.1929.1>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning up indexes for bucket `default` [ns_server:info,2014-08-19T16:52:42.010,ns_1@10.242.238.90:<0.1929.1>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:52:42.015,ns_1@10.242.238.90:<0.1932.1>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 70794, disk size is 10529838 [ns_server:debug,2014-08-19T16:52:42.015,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:52:42.016,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:info,2014-08-19T16:52:42.186,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.89' [ns_server:debug,2014-08-19T16:53:12.017,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2014-08-19T16:53:12.022,ns_1@10.242.238.90:<0.2105.1>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning up indexes for bucket `default` [ns_server:info,2014-08-19T16:53:12.022,ns_1@10.242.238.90:<0.2105.1>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:53:12.026,ns_1@10.242.238.90:<0.2108.1>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 70794, disk size is 10529838 [ns_server:debug,2014-08-19T16:53:12.026,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:53:12.027,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:53:26.706,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:26.706,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:26.707,ns_1@10.242.238.90:ns_config_isasl_sync<0.17399.0>:ns_config_isasl_sync:writeSASLConf:143]Writing isasl passwd file: "/opt/couchbase/var/lib/couchbase/isasl.pw" [ns_server:debug,2014-08-19T16:53:26.707,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, [{map,[]}, {fastForwardMap,[]}, {uuid,<<"dfbe82706d975a8e74781701767f7843">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,104857600}, {auth_type,none}, {moxi_port,11221}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,[]}]]}] [ns_server:debug,2014-08-19T16:53:26.709,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:26.709,ns_1@10.242.238.90:ns_bucket_worker<0.17558.0>:ns_bucket_sup:update_childs:84]Starting new child: {{per_bucket_sup,"maps_1_8_metahash"}, {single_bucket_sup,start_link,["maps_1_8_metahash"]}, permanent,infinity,supervisor, [single_bucket_sup]} [ns_server:debug,2014-08-19T16:53:26.709,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [error_logger:info,2014-08-19T16:53:26.709,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} 
started: [{pid,<0.2192.1>}, {name,{per_bucket_sup,"maps_1_8_metahash"}}, {mfargs, {single_bucket_sup,start_link, ["maps_1_8_metahash"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:53:26.710,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"maps_1_8_metahash", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"dfbe82706d975a8e74781701767f7843">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,104857600}, {auth_type,none}, {moxi_port,11221}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}]}]}] [ns_server:debug,2014-08-19T16:53:26.746,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:init:228]Usable vbuckets: [] [ns_server:debug,2014-08-19T16:53:26.746,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:26.746,ns_1@10.242.238.90:ns_memcached-maps_1_8_metahash<0.2207.1>:ns_memcached:init:144]Starting ns_memcached [error_logger:info,2014-08-19T16:53:26.746,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_metahash'} started: [{pid,<0.2194.1>}, {name,{capi_set_view_manager,"maps_1_8_metahash"}}, {mfargs, {capi_set_view_manager,start_link, ["maps_1_8_metahash"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:53:26.747,ns_1@10.242.238.90:<0.2208.1>:ns_memcached:run_connect_phase:167]Started 'connecting' phase of ns_memcached-maps_1_8_metahash. Parent is <0.2207.1> [error_logger:info,2014-08-19T16:53:26.747,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_metahash'} started: [{pid,<0.2207.1>}, {name,{ns_memcached,"maps_1_8_metahash"}}, {mfargs, {ns_memcached,start_link,["maps_1_8_metahash"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:26.747,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_metahash'} started: [{pid,<0.2209.1>}, {name,{tap_replication_manager,"maps_1_8_metahash"}}, {mfargs, {tap_replication_manager,start_link, ["maps_1_8_metahash"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T16:53:26.747,ns_1@10.242.238.90:janitor_agent-maps_1_8_metahash<0.2212.1>:janitor_agent:read_flush_counter:936]Loading flushseq failed: {error,enoent}. Assuming it's equal to global config. 
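The compaction_daemon records above log a 30% database_fragmentation_threshold next to the data size (70794 bytes) and disk size (10529838 bytes) reported for bucket "default". The exact check compaction_daemon applies is not shown in the log, so the ratio below is only an assumed illustration of that kind of threshold arithmetic.

    # Values copied from the compaction_daemon debug lines above.
    data_size = 70_794          # bytes of live data reported for bucket "default"
    disk_size = 10_529_838      # bytes on disk reported for bucket "default"
    threshold_pct = 30          # database_fragmentation_threshold from the logged config

    # Assumed illustration: treat the non-data portion of the file as fragmentation.
    fragmentation_pct = (disk_size - data_size) / disk_size * 100
    print(f"fragmentation ~ {fragmentation_pct:.1f}% (threshold {threshold_pct}%)")
    print("over threshold" if fragmentation_pct > threshold_pct else "under threshold")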
[error_logger:info,2014-08-19T16:53:26.747,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_metahash'} started: [{pid,<0.2210.1>}, {name,{ns_vbm_new_sup,"maps_1_8_metahash"}}, {mfargs, {ns_vbm_new_sup,start_link,["maps_1_8_metahash"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2014-08-19T16:53:26.747,ns_1@10.242.238.90:janitor_agent-maps_1_8_metahash<0.2212.1>:janitor_agent:read_flush_counter_from_config:943]Initialized flushseq 0 from bucket config [error_logger:info,2014-08-19T16:53:26.747,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_metahash'} started: [{pid,<0.2211.1>}, {name,{ns_vbm_sup,"maps_1_8_metahash"}}, {mfargs,{ns_vbm_sup,start_link,["maps_1_8_metahash"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:53:26.747,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_metahash'} started: [{pid,<0.2212.1>}, {name,{janitor_agent,"maps_1_8_metahash"}}, {mfargs, {janitor_agent,start_link,["maps_1_8_metahash"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:26.748,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_metahash'} started: [{pid,<0.2213.1>}, {name,{couch_stats_reader,"maps_1_8_metahash"}}, {mfargs, {couch_stats_reader,start_link, ["maps_1_8_metahash"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:26.748,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_metahash'} started: [{pid,<0.2214.1>}, {name,{stats_collector,"maps_1_8_metahash"}}, {mfargs, {stats_collector,start_link,["maps_1_8_metahash"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:26.748,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_metahash'} started: [{pid,<0.2216.1>}, {name,{stats_archiver,"maps_1_8_metahash"}}, {mfargs, {stats_archiver,start_link,["maps_1_8_metahash"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:26.749,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_metahash'} started: [{pid,<0.2218.1>}, {name,{stats_reader,"maps_1_8_metahash"}}, {mfargs, {stats_reader,start_link,["maps_1_8_metahash"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:26.749,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: 
{local,'single_bucket_sup-maps_1_8_metahash'} started: [{pid,<0.2219.1>}, {name,{failover_safeness_level,"maps_1_8_metahash"}}, {mfargs, {failover_safeness_level,start_link, ["maps_1_8_metahash"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:26.749,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_metahash'} started: [{pid,<0.2220.1>}, {name,{terse_bucket_info_uploader,"maps_1_8_metahash"}}, {mfargs, {terse_bucket_info_uploader,start_link, ["maps_1_8_metahash"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:53:26.750,ns_1@10.242.238.90:<0.17535.0>:mc_tcp_listener:accept_loop:31]Got new connection [ns_server:debug,2014-08-19T16:53:26.751,ns_1@10.242.238.90:<0.17535.0>:mc_tcp_listener:accept_loop:33]Passed connection to mc_conn_sup: <0.2222.1> [ns_server:info,2014-08-19T16:53:26.752,ns_1@10.242.238.90:ns_memcached-maps_1_8_metahash<0.2207.1>:ns_memcached:ensure_bucket:1178]Created bucket "maps_1_8_metahash" with config string "ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;max_size=104857600;tap_keepalive=300;dbname=/var/lib/pgsql/maps_1_8_metahash;allow_data_loss_during_shutdown=true;backend=couchdb;couch_bucket=maps_1_8_metahash;couch_port=11213;max_vbuckets=1024;alog_path=/var/lib/pgsql/maps_1_8_metahash/access.log;data_traffic_enabled=false;max_num_workers=3;uuid=dfbe82706d975a8e74781701767f7843;vb0=false;waitforwarmup=false;failpartialwarmup=false;" [ns_server:info,2014-08-19T16:53:26.752,ns_1@10.242.238.90:ns_memcached-maps_1_8_metahash<0.2207.1>:ns_memcached:handle_cast:609]Main ns_memcached connection established: {ok,#Port<0.21603>} [ns_server:debug,2014-08-19T16:53:26.753,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:26.753,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [user:info,2014-08-19T16:53:26.753,ns_1@10.242.238.90:ns_memcached-maps_1_8_metahash<0.2207.1>:ns_memcached:handle_cast:632]Bucket "maps_1_8_metahash" loaded on node 'ns_1@10.242.238.90' in 0 seconds. [ns_server:debug,2014-08-19T16:53:26.754,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:26.764,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [user:info,2014-08-19T16:53:26.765,ns_1@10.242.238.90:<0.17397.0>:ns_log:crash_consumption_loop:64]Port server moxi on node 'babysitter_of_ns_1@127.0.0.1' exited with status 0. Restarting. Messages: 2014-08-19 16:49:03: (cproxy_config.c.315) env: MOXI_SASL_PLAIN_USR (13) 2014-08-19 16:49:03: (cproxy_config.c.324) env: MOXI_SASL_PLAIN_PWD (12) EOL on stdin. 
Exiting [ns_server:debug,2014-08-19T16:53:26.780,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:26.949,ns_1@10.242.238.90:ns_heart_slow_status_updater<0.17440.0>:ns_heart:current_status_slow:261]Ignoring failure to get stats for bucket: "maps_1_8_metahash": {error,no_samples} [ns_server:debug,2014-08-19T16:53:27.778,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:27.778,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:27.780,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:27.780,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:27.783,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"maps_1_8_metahash", [{map,[{0,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {1,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {2,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {3,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {4,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {5,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {6,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {7,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {8,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {9,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {10,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {11,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {12,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {13,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {14,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {15,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {16,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {17,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {18,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {19,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {20,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {21,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {22,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {23,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {24,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {25,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {26,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {27,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {28,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {29,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {30,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {31,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {32,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {33,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {34,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, 
{35,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {36,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {37,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {38,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {39,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {40,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {41,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {42,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {43,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {44,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {45,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {46,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {47,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {48,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {49,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {50,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {51,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {52,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {53,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {54,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {55,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {56,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {57,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {58,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {59,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {60,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {61,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {62,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {63,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {64,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {65,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {66,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {67,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {68,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {69,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {70,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {71,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {72,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {73,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {74,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {75,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {76,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {77,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {78,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {79,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {80,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {81,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {82,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {83,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {84,[],['ns_1@10.242.238.88'|...]}, {85,[],[...]}, {86,[],...}, {87,...}, {...}|...]}, {fastForwardMap,[]}, {uuid,<<"dfbe82706d975a8e74781701767f7843">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,104857600}, {auth_type,none}, {moxi_port,11221}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:53:27.784,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1023 state to replica [ns_server:info,2014-08-19T16:53:27.784,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1022 state to replica [ns_server:info,2014-08-19T16:53:27.784,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1021 state to replica 
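The ns_memcached:ensure_bucket record above shows "maps_1_8_metahash" being created from a single semicolon-delimited engine config string. The sketch below splits such a string into key/value pairs; the string literal is a shortened copy of the logged one (several parameters omitted), and the parsing is illustrative rather than how ns_memcached itself consumes it.

    # Shortened copy of the engine config string logged by ns_memcached:ensure_bucket.
    config = ("ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;"
              "max_size=104857600;backend=couchdb;couch_bucket=maps_1_8_metahash;"
              "max_vbuckets=1024;uuid=dfbe82706d975a8e74781701767f7843;vb0=false")

    # Split "k=v;k=v;..." into a dict; values stay as strings, as in the log.
    params = dict(item.split("=", 1) for item in config.split(";") if item)

    print(params["max_size"])      # "104857600", matching the bucket's ram_quota of 104857600
    print(params["max_vbuckets"])  # "1024"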
[ns_server:info,2014-08-19T16:53:27.785,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1020 state to replica [ns_server:info,2014-08-19T16:53:27.785,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1019 state to replica [ns_server:info,2014-08-19T16:53:27.786,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1018 state to replica [ns_server:info,2014-08-19T16:53:27.786,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1017 state to replica [ns_server:info,2014-08-19T16:53:27.786,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1016 state to replica [ns_server:info,2014-08-19T16:53:27.787,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1015 state to replica [ns_server:info,2014-08-19T16:53:27.787,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1014 state to replica [ns_server:info,2014-08-19T16:53:27.787,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1013 state to replica [ns_server:info,2014-08-19T16:53:27.787,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1012 state to replica [ns_server:info,2014-08-19T16:53:27.788,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1011 state to replica [ns_server:info,2014-08-19T16:53:27.788,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1010 state to replica [ns_server:info,2014-08-19T16:53:27.788,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1009 state to replica [ns_server:info,2014-08-19T16:53:27.788,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1008 state to replica [ns_server:info,2014-08-19T16:53:27.789,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1007 state to replica [ns_server:info,2014-08-19T16:53:27.789,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1006 state to replica [ns_server:info,2014-08-19T16:53:27.789,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1005 state to replica [ns_server:info,2014-08-19T16:53:27.789,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1004 state to replica [ns_server:info,2014-08-19T16:53:27.790,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1003 state to replica [ns_server:info,2014-08-19T16:53:27.790,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1002 state to replica [ns_server:info,2014-08-19T16:53:27.790,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1001 state to replica [ns_server:info,2014-08-19T16:53:27.790,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 1000 state to replica [ns_server:info,2014-08-19T16:53:27.791,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 999 state to replica [ns_server:info,2014-08-19T16:53:27.791,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 998 state to replica [ns_server:info,2014-08-19T16:53:27.791,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 997 state to replica [ns_server:info,2014-08-19T16:53:27.791,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 996 state to replica 
[ns_server:info,2014-08-19T16:53:27.792,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 995 state to replica [ns_server:info,2014-08-19T16:53:27.792,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 994 state to replica [ns_server:info,2014-08-19T16:53:27.792,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 993 state to replica [ns_server:info,2014-08-19T16:53:27.792,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 992 state to replica [ns_server:info,2014-08-19T16:53:27.793,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 991 state to replica [ns_server:info,2014-08-19T16:53:27.793,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 990 state to replica [ns_server:info,2014-08-19T16:53:27.793,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 989 state to replica [ns_server:info,2014-08-19T16:53:27.793,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 988 state to replica [ns_server:info,2014-08-19T16:53:27.794,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 987 state to replica [ns_server:info,2014-08-19T16:53:27.794,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 986 state to replica [ns_server:info,2014-08-19T16:53:27.794,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 985 state to replica [ns_server:info,2014-08-19T16:53:27.794,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 984 state to replica [ns_server:info,2014-08-19T16:53:27.795,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 983 state to replica [ns_server:info,2014-08-19T16:53:27.795,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 982 state to replica [ns_server:info,2014-08-19T16:53:27.795,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 981 state to replica [ns_server:info,2014-08-19T16:53:27.795,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 980 state to replica [ns_server:info,2014-08-19T16:53:27.796,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 979 state to replica [ns_server:info,2014-08-19T16:53:27.796,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 978 state to replica [ns_server:info,2014-08-19T16:53:27.796,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 977 state to replica [ns_server:info,2014-08-19T16:53:27.797,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 976 state to replica [ns_server:info,2014-08-19T16:53:27.797,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 975 state to replica [ns_server:info,2014-08-19T16:53:27.797,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 974 state to replica [ns_server:info,2014-08-19T16:53:27.797,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 973 state to replica [ns_server:info,2014-08-19T16:53:27.798,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 972 state to replica [ns_server:info,2014-08-19T16:53:27.798,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 971 state to replica 
[ns_server:info,2014-08-19T16:53:27.798,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 970 state to replica [ns_server:info,2014-08-19T16:53:27.798,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 969 state to replica [ns_server:info,2014-08-19T16:53:27.799,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 968 state to replica [ns_server:info,2014-08-19T16:53:27.799,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 967 state to replica [ns_server:info,2014-08-19T16:53:27.799,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 966 state to replica [ns_server:info,2014-08-19T16:53:27.799,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 965 state to replica [ns_server:info,2014-08-19T16:53:27.800,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 964 state to replica [ns_server:info,2014-08-19T16:53:27.800,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 963 state to replica [ns_server:info,2014-08-19T16:53:27.800,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 962 state to replica [ns_server:info,2014-08-19T16:53:27.801,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 961 state to replica [ns_server:info,2014-08-19T16:53:27.801,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 960 state to replica [ns_server:info,2014-08-19T16:53:27.801,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 959 state to replica [ns_server:info,2014-08-19T16:53:27.801,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 958 state to replica [ns_server:info,2014-08-19T16:53:27.802,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 957 state to replica [ns_server:info,2014-08-19T16:53:27.802,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 956 state to replica [ns_server:info,2014-08-19T16:53:27.802,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 955 state to replica [ns_server:info,2014-08-19T16:53:27.802,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 954 state to replica [ns_server:info,2014-08-19T16:53:27.803,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 953 state to replica [ns_server:info,2014-08-19T16:53:27.803,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 952 state to replica [ns_server:info,2014-08-19T16:53:27.803,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 951 state to replica [ns_server:info,2014-08-19T16:53:27.803,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 950 state to replica [ns_server:info,2014-08-19T16:53:27.804,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 949 state to replica [ns_server:info,2014-08-19T16:53:27.804,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 948 state to replica [ns_server:info,2014-08-19T16:53:27.804,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 947 state to replica [ns_server:info,2014-08-19T16:53:27.804,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 946 state to replica 
[ns_server:info,2014-08-19T16:53:27.805,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 945 state to replica [ns_server:info,2014-08-19T16:53:27.805,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 944 state to replica [ns_server:info,2014-08-19T16:53:27.805,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 943 state to replica [ns_server:info,2014-08-19T16:53:27.805,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 942 state to replica [ns_server:info,2014-08-19T16:53:27.806,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 941 state to replica [ns_server:info,2014-08-19T16:53:27.806,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 940 state to replica [ns_server:info,2014-08-19T16:53:27.806,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 939 state to replica [ns_server:info,2014-08-19T16:53:27.806,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 938 state to replica [ns_server:info,2014-08-19T16:53:27.807,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 767 state to active [ns_server:info,2014-08-19T16:53:27.807,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 766 state to active [ns_server:info,2014-08-19T16:53:27.807,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 765 state to active [ns_server:info,2014-08-19T16:53:27.807,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 764 state to active [ns_server:info,2014-08-19T16:53:27.808,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 763 state to active [ns_server:info,2014-08-19T16:53:27.808,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 762 state to active [ns_server:info,2014-08-19T16:53:27.808,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 761 state to active [ns_server:info,2014-08-19T16:53:27.809,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 760 state to active [ns_server:info,2014-08-19T16:53:27.809,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 759 state to active [ns_server:info,2014-08-19T16:53:27.809,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 758 state to active [ns_server:info,2014-08-19T16:53:27.809,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 757 state to active [ns_server:info,2014-08-19T16:53:27.810,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 756 state to active [ns_server:info,2014-08-19T16:53:27.810,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 755 state to active [ns_server:info,2014-08-19T16:53:27.810,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 754 state to active [ns_server:info,2014-08-19T16:53:27.810,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 753 state to active [ns_server:info,2014-08-19T16:53:27.811,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 752 state to active [ns_server:info,2014-08-19T16:53:27.811,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 751 state to active 
[ns_server:info,2014-08-19T16:53:27.811,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 750 state to active [ns_server:info,2014-08-19T16:53:27.811,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 749 state to active [ns_server:info,2014-08-19T16:53:27.812,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 748 state to active [ns_server:info,2014-08-19T16:53:27.812,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 747 state to active [ns_server:info,2014-08-19T16:53:27.812,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 746 state to active [ns_server:info,2014-08-19T16:53:27.813,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 745 state to active [ns_server:info,2014-08-19T16:53:27.813,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 744 state to active [ns_server:info,2014-08-19T16:53:27.813,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 743 state to active [ns_server:info,2014-08-19T16:53:27.814,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 742 state to active [ns_server:info,2014-08-19T16:53:27.814,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 741 state to active [ns_server:info,2014-08-19T16:53:27.814,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 740 state to active [ns_server:info,2014-08-19T16:53:27.815,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 739 state to active [ns_server:info,2014-08-19T16:53:27.815,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 738 state to active [ns_server:info,2014-08-19T16:53:27.815,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 737 state to active [ns_server:info,2014-08-19T16:53:27.816,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 736 state to active [ns_server:info,2014-08-19T16:53:27.816,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 735 state to active [ns_server:info,2014-08-19T16:53:27.816,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 734 state to active [ns_server:info,2014-08-19T16:53:27.817,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 733 state to active [ns_server:info,2014-08-19T16:53:27.817,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 732 state to active [ns_server:info,2014-08-19T16:53:27.817,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 731 state to active [ns_server:info,2014-08-19T16:53:27.817,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 730 state to active [ns_server:info,2014-08-19T16:53:27.818,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 729 state to active [ns_server:info,2014-08-19T16:53:27.818,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 728 state to active [ns_server:info,2014-08-19T16:53:27.818,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 727 state to active [ns_server:info,2014-08-19T16:53:27.818,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 726 state to active 
[ns_server:info,2014-08-19T16:53:27.819,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 725 state to active [ns_server:info,2014-08-19T16:53:27.819,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 724 state to active [ns_server:info,2014-08-19T16:53:27.819,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 723 state to active [ns_server:info,2014-08-19T16:53:27.820,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 722 state to active [ns_server:info,2014-08-19T16:53:27.820,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 721 state to active [ns_server:info,2014-08-19T16:53:27.820,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 720 state to active [ns_server:info,2014-08-19T16:53:27.820,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 719 state to active [ns_server:info,2014-08-19T16:53:27.821,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 718 state to active [ns_server:info,2014-08-19T16:53:27.821,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 717 state to active [ns_server:info,2014-08-19T16:53:27.821,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 716 state to active [ns_server:info,2014-08-19T16:53:27.821,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 715 state to active [ns_server:info,2014-08-19T16:53:27.822,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 714 state to active [ns_server:info,2014-08-19T16:53:27.822,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 713 state to active [ns_server:info,2014-08-19T16:53:27.822,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 712 state to active [ns_server:info,2014-08-19T16:53:27.823,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 711 state to active [ns_server:info,2014-08-19T16:53:27.823,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 710 state to active [ns_server:info,2014-08-19T16:53:27.823,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 709 state to active [ns_server:info,2014-08-19T16:53:27.823,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 708 state to active [ns_server:info,2014-08-19T16:53:27.824,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 707 state to active [ns_server:info,2014-08-19T16:53:27.824,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 706 state to active [ns_server:info,2014-08-19T16:53:27.824,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 705 state to active [ns_server:info,2014-08-19T16:53:27.824,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 704 state to active [ns_server:info,2014-08-19T16:53:27.825,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 703 state to active [ns_server:info,2014-08-19T16:53:27.825,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 702 state to active [ns_server:info,2014-08-19T16:53:27.825,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 701 state to active 
[ns_server:info,2014-08-19T16:53:27.825,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 700 state to active [ns_server:info,2014-08-19T16:53:27.826,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 699 state to active [ns_server:info,2014-08-19T16:53:27.826,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 698 state to active [ns_server:info,2014-08-19T16:53:27.826,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 697 state to active [ns_server:info,2014-08-19T16:53:27.826,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 696 state to active [ns_server:info,2014-08-19T16:53:27.827,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 695 state to active [ns_server:info,2014-08-19T16:53:27.827,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 694 state to active [ns_server:info,2014-08-19T16:53:27.827,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 693 state to active [ns_server:info,2014-08-19T16:53:27.827,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 692 state to active [ns_server:info,2014-08-19T16:53:27.828,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 691 state to active [ns_server:info,2014-08-19T16:53:27.828,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 690 state to active [ns_server:info,2014-08-19T16:53:27.828,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 689 state to active [ns_server:info,2014-08-19T16:53:27.828,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 688 state to active [ns_server:info,2014-08-19T16:53:27.828,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 687 state to active [ns_server:info,2014-08-19T16:53:27.829,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 686 state to active [ns_server:info,2014-08-19T16:53:27.829,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 685 state to active [ns_server:info,2014-08-19T16:53:27.830,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 684 state to active [ns_server:info,2014-08-19T16:53:27.830,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 683 state to active [ns_server:info,2014-08-19T16:53:27.830,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 682 state to active [ns_server:info,2014-08-19T16:53:27.831,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 681 state to active [ns_server:info,2014-08-19T16:53:27.831,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 680 state to active [ns_server:info,2014-08-19T16:53:27.831,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 679 state to active [ns_server:info,2014-08-19T16:53:27.831,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 678 state to active [ns_server:info,2014-08-19T16:53:27.832,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 677 state to active [ns_server:info,2014-08-19T16:53:27.832,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 676 state to active 
[ns_server:info,2014-08-19T16:53:27.832,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 675 state to active [ns_server:info,2014-08-19T16:53:27.833,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 674 state to active [ns_server:info,2014-08-19T16:53:27.833,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 673 state to active [ns_server:info,2014-08-19T16:53:27.833,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 672 state to active [ns_server:info,2014-08-19T16:53:27.833,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 671 state to active [ns_server:info,2014-08-19T16:53:27.834,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 670 state to active [ns_server:info,2014-08-19T16:53:27.846,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 669 state to active [ns_server:debug,2014-08-19T16:53:27.846,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1023. Nacking mccouch update. [views:debug,2014-08-19T16:53:27.846,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1023. Updated state: replica (0) [ns_server:info,2014-08-19T16:53:27.847,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 668 state to active [ns_server:debug,2014-08-19T16:53:27.847,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1023,replica,0} [ns_server:debug,2014-08-19T16:53:27.847,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1023] [ns_server:info,2014-08-19T16:53:27.847,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 667 state to active [ns_server:info,2014-08-19T16:53:27.847,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 666 state to active [ns_server:info,2014-08-19T16:53:27.848,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 665 state to active [ns_server:info,2014-08-19T16:53:27.848,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 664 state to active [ns_server:info,2014-08-19T16:53:27.848,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 663 state to active [ns_server:info,2014-08-19T16:53:27.849,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 662 state to active [ns_server:info,2014-08-19T16:53:27.849,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 661 state to active [ns_server:info,2014-08-19T16:53:27.849,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 660 state to active [ns_server:info,2014-08-19T16:53:27.850,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 659 state to active [ns_server:info,2014-08-19T16:53:27.850,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 658 state to active [ns_server:info,2014-08-19T16:53:27.851,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 657 state to active [ns_server:info,2014-08-19T16:53:27.851,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 656 state to active 
[ns_server:info,2014-08-19T16:53:27.851,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 655 state to active [ns_server:info,2014-08-19T16:53:27.851,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 654 state to active [ns_server:info,2014-08-19T16:53:27.852,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 653 state to active [ns_server:info,2014-08-19T16:53:27.852,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 652 state to active [ns_server:info,2014-08-19T16:53:27.852,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 651 state to active [ns_server:info,2014-08-19T16:53:27.853,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 650 state to active [ns_server:info,2014-08-19T16:53:27.853,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 649 state to active [ns_server:info,2014-08-19T16:53:27.853,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 648 state to active [ns_server:info,2014-08-19T16:53:27.853,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 647 state to active [ns_server:info,2014-08-19T16:53:27.854,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 646 state to active [ns_server:info,2014-08-19T16:53:27.854,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 645 state to active [ns_server:info,2014-08-19T16:53:27.854,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 644 state to active [ns_server:info,2014-08-19T16:53:27.855,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 643 state to active [ns_server:info,2014-08-19T16:53:27.855,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 642 state to active [ns_server:info,2014-08-19T16:53:27.855,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 641 state to active [ns_server:info,2014-08-19T16:53:27.855,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 640 state to active [ns_server:info,2014-08-19T16:53:27.856,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 639 state to active [ns_server:info,2014-08-19T16:53:27.856,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 638 state to active [ns_server:info,2014-08-19T16:53:27.856,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 637 state to active [ns_server:info,2014-08-19T16:53:27.856,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 636 state to active [ns_server:info,2014-08-19T16:53:27.857,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 635 state to active [ns_server:info,2014-08-19T16:53:27.857,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 634 state to active [ns_server:info,2014-08-19T16:53:27.857,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 633 state to active [ns_server:info,2014-08-19T16:53:27.858,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 632 state to active [ns_server:info,2014-08-19T16:53:27.858,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 631 state to active 
[ns_server:info,2014-08-19T16:53:27.858,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 630 state to active [ns_server:info,2014-08-19T16:53:27.859,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 629 state to active [ns_server:info,2014-08-19T16:53:27.859,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 628 state to active [ns_server:info,2014-08-19T16:53:27.859,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 627 state to active [ns_server:info,2014-08-19T16:53:27.859,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 626 state to active [ns_server:info,2014-08-19T16:53:27.860,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 625 state to active [ns_server:info,2014-08-19T16:53:27.860,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 624 state to active [ns_server:info,2014-08-19T16:53:27.860,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 623 state to active [ns_server:info,2014-08-19T16:53:27.861,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 622 state to active [ns_server:info,2014-08-19T16:53:27.861,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 621 state to active [ns_server:info,2014-08-19T16:53:27.861,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 620 state to active [ns_server:info,2014-08-19T16:53:27.861,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 619 state to active [ns_server:info,2014-08-19T16:53:27.862,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 618 state to active [ns_server:info,2014-08-19T16:53:27.862,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 617 state to active [ns_server:info,2014-08-19T16:53:27.862,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 616 state to active [ns_server:info,2014-08-19T16:53:27.862,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 615 state to active [ns_server:info,2014-08-19T16:53:27.863,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 614 state to active [ns_server:info,2014-08-19T16:53:27.863,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 613 state to active [ns_server:info,2014-08-19T16:53:27.863,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 612 state to active [ns_server:info,2014-08-19T16:53:27.864,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 611 state to active [ns_server:info,2014-08-19T16:53:27.864,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 610 state to active [ns_server:info,2014-08-19T16:53:27.864,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 609 state to active [ns_server:info,2014-08-19T16:53:27.864,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 608 state to active [ns_server:info,2014-08-19T16:53:27.865,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 607 state to active [ns_server:info,2014-08-19T16:53:27.865,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 606 state to active 
[ns_server:info,2014-08-19T16:53:27.865,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 605 state to active [ns_server:info,2014-08-19T16:53:27.865,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 604 state to active [ns_server:info,2014-08-19T16:53:27.866,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 603 state to active [ns_server:info,2014-08-19T16:53:27.866,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 602 state to active [ns_server:info,2014-08-19T16:53:27.866,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 601 state to active [ns_server:info,2014-08-19T16:53:27.866,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 600 state to active [ns_server:info,2014-08-19T16:53:27.867,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 599 state to active [ns_server:info,2014-08-19T16:53:27.867,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 598 state to active [ns_server:info,2014-08-19T16:53:27.867,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 597 state to active [ns_server:info,2014-08-19T16:53:27.868,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 596 state to active [ns_server:info,2014-08-19T16:53:27.868,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 595 state to active [ns_server:info,2014-08-19T16:53:27.868,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 594 state to active [ns_server:info,2014-08-19T16:53:27.868,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 593 state to active [ns_server:info,2014-08-19T16:53:27.869,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 592 state to active [ns_server:info,2014-08-19T16:53:27.869,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 591 state to active [ns_server:info,2014-08-19T16:53:27.869,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 590 state to active [ns_server:info,2014-08-19T16:53:27.869,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 589 state to active [ns_server:info,2014-08-19T16:53:27.870,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 588 state to active [ns_server:info,2014-08-19T16:53:27.870,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 587 state to active [ns_server:info,2014-08-19T16:53:27.870,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 586 state to active [ns_server:info,2014-08-19T16:53:27.870,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 585 state to active [ns_server:info,2014-08-19T16:53:27.871,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 584 state to active [ns_server:info,2014-08-19T16:53:27.871,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 583 state to active [ns_server:info,2014-08-19T16:53:27.871,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 582 state to active [ns_server:info,2014-08-19T16:53:27.872,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 581 state to active 
[ns_server:info,2014-08-19T16:53:27.872,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 580 state to active [ns_server:info,2014-08-19T16:53:27.872,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 579 state to active [ns_server:info,2014-08-19T16:53:27.872,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 578 state to active [ns_server:info,2014-08-19T16:53:27.873,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 577 state to active [ns_server:info,2014-08-19T16:53:27.873,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 576 state to active [ns_server:info,2014-08-19T16:53:27.873,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 575 state to active [ns_server:info,2014-08-19T16:53:27.873,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 574 state to active [ns_server:info,2014-08-19T16:53:27.874,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 573 state to active [ns_server:info,2014-08-19T16:53:27.874,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 572 state to active [ns_server:info,2014-08-19T16:53:27.874,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 571 state to active [ns_server:info,2014-08-19T16:53:27.874,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 570 state to active [ns_server:info,2014-08-19T16:53:27.875,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 569 state to active [ns_server:info,2014-08-19T16:53:27.875,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 568 state to active [ns_server:info,2014-08-19T16:53:27.875,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 567 state to active [ns_server:info,2014-08-19T16:53:27.876,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 566 state to active [ns_server:info,2014-08-19T16:53:27.876,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 565 state to active [ns_server:info,2014-08-19T16:53:27.876,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 564 state to active [ns_server:info,2014-08-19T16:53:27.876,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 563 state to active [ns_server:info,2014-08-19T16:53:27.877,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 562 state to active [ns_server:info,2014-08-19T16:53:27.877,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 561 state to active [ns_server:info,2014-08-19T16:53:27.877,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 560 state to active [ns_server:info,2014-08-19T16:53:27.877,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 559 state to active [ns_server:info,2014-08-19T16:53:27.878,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 558 state to active [ns_server:info,2014-08-19T16:53:27.878,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 557 state to active [ns_server:info,2014-08-19T16:53:27.878,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 556 state to active 
[ns_server:info,2014-08-19T16:53:27.879,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 555 state to active [ns_server:info,2014-08-19T16:53:27.879,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 554 state to active [ns_server:info,2014-08-19T16:53:27.879,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 553 state to active [ns_server:info,2014-08-19T16:53:27.879,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 552 state to active [ns_server:info,2014-08-19T16:53:27.880,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 551 state to active [ns_server:info,2014-08-19T16:53:27.880,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 550 state to active [ns_server:info,2014-08-19T16:53:27.880,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 549 state to active [views:debug,2014-08-19T16:53:27.880,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1023. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:27.881,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1023,replica,0} [ns_server:info,2014-08-19T16:53:27.881,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 548 state to active [ns_server:info,2014-08-19T16:53:27.881,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 547 state to active [ns_server:info,2014-08-19T16:53:27.881,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 546 state to active [ns_server:info,2014-08-19T16:53:27.882,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 545 state to active [ns_server:info,2014-08-19T16:53:27.882,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 544 state to active [ns_server:info,2014-08-19T16:53:27.882,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 543 state to active [ns_server:info,2014-08-19T16:53:27.882,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 542 state to active [ns_server:info,2014-08-19T16:53:27.883,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 541 state to active [ns_server:info,2014-08-19T16:53:27.883,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 540 state to active [ns_server:info,2014-08-19T16:53:27.883,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 539 state to active [ns_server:info,2014-08-19T16:53:27.883,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 538 state to active [ns_server:info,2014-08-19T16:53:27.884,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 537 state to active [ns_server:info,2014-08-19T16:53:27.884,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 536 state to active [ns_server:info,2014-08-19T16:53:27.884,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 535 state to active [ns_server:info,2014-08-19T16:53:27.884,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 534 state to active
[ns_server:info,2014-08-19T16:53:27.885,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 533 state to active [ns_server:info,2014-08-19T16:53:27.885,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 532 state to active [ns_server:info,2014-08-19T16:53:27.885,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 531 state to active [ns_server:info,2014-08-19T16:53:27.885,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 530 state to active [ns_server:info,2014-08-19T16:53:27.886,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 529 state to active [ns_server:info,2014-08-19T16:53:27.886,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 528 state to active [ns_server:info,2014-08-19T16:53:27.886,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 527 state to active [ns_server:info,2014-08-19T16:53:27.886,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 526 state to active [ns_server:info,2014-08-19T16:53:27.887,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 525 state to active [ns_server:info,2014-08-19T16:53:27.887,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 524 state to active [ns_server:info,2014-08-19T16:53:27.887,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 523 state to active [ns_server:info,2014-08-19T16:53:27.887,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 522 state to active [ns_server:info,2014-08-19T16:53:27.887,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 521 state to active [ns_server:info,2014-08-19T16:53:27.888,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 520 state to active [ns_server:info,2014-08-19T16:53:27.888,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 519 state to active [ns_server:info,2014-08-19T16:53:27.888,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 518 state to active [ns_server:info,2014-08-19T16:53:27.888,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 517 state to active [ns_server:info,2014-08-19T16:53:27.889,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 516 state to active [ns_server:info,2014-08-19T16:53:27.889,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 515 state to active [ns_server:info,2014-08-19T16:53:27.889,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 514 state to active [ns_server:info,2014-08-19T16:53:27.889,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 513 state to active [ns_server:info,2014-08-19T16:53:27.890,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 512 state to active [ns_server:info,2014-08-19T16:53:27.890,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 426 state to replica [ns_server:info,2014-08-19T16:53:27.890,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 425 state to replica [ns_server:info,2014-08-19T16:53:27.890,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 424 state to replica [ns_server:info,2014-08-19T16:53:27.891,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 423 state to replica
[ns_server:info,2014-08-19T16:53:27.891,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 422 state to replica [ns_server:info,2014-08-19T16:53:27.891,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 421 state to replica [ns_server:info,2014-08-19T16:53:27.891,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 420 state to replica [ns_server:info,2014-08-19T16:53:27.892,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 419 state to replica [ns_server:info,2014-08-19T16:53:27.892,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 418 state to replica [ns_server:info,2014-08-19T16:53:27.892,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 417 state to replica [ns_server:info,2014-08-19T16:53:27.892,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 416 state to replica [ns_server:info,2014-08-19T16:53:27.892,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 415 state to replica [ns_server:info,2014-08-19T16:53:27.893,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 414 state to replica [ns_server:info,2014-08-19T16:53:27.893,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 413 state to replica [ns_server:info,2014-08-19T16:53:27.893,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 412 state to replica [ns_server:info,2014-08-19T16:53:27.893,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 411 state to replica [ns_server:info,2014-08-19T16:53:27.894,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 410 state to replica [ns_server:info,2014-08-19T16:53:27.894,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 409 state to replica [ns_server:info,2014-08-19T16:53:27.894,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 408 state to replica [ns_server:info,2014-08-19T16:53:27.894,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 407 state to replica [ns_server:info,2014-08-19T16:53:27.895,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 406 state to replica [ns_server:info,2014-08-19T16:53:27.895,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 405 state to replica [ns_server:info,2014-08-19T16:53:27.895,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 404 state to replica [ns_server:info,2014-08-19T16:53:27.895,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 403 state to replica [ns_server:info,2014-08-19T16:53:27.896,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 402 state to replica [ns_server:info,2014-08-19T16:53:27.896,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 401 state to replica [ns_server:info,2014-08-19T16:53:27.896,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 400 state to replica [ns_server:info,2014-08-19T16:53:27.896,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 399 state to replica [ns_server:info,2014-08-19T16:53:27.896,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 398 state to replica 
[ns_server:info,2014-08-19T16:53:27.897,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 397 state to replica [ns_server:info,2014-08-19T16:53:27.897,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 396 state to replica [ns_server:info,2014-08-19T16:53:27.897,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 395 state to replica [ns_server:info,2014-08-19T16:53:27.897,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 394 state to replica [ns_server:info,2014-08-19T16:53:27.898,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 393 state to replica [ns_server:info,2014-08-19T16:53:27.898,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 392 state to replica [ns_server:info,2014-08-19T16:53:27.898,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 391 state to replica [ns_server:info,2014-08-19T16:53:27.898,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 390 state to replica [ns_server:info,2014-08-19T16:53:27.899,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 389 state to replica [ns_server:info,2014-08-19T16:53:27.899,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 388 state to replica [ns_server:info,2014-08-19T16:53:27.899,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 387 state to replica [ns_server:info,2014-08-19T16:53:27.899,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 386 state to replica [ns_server:info,2014-08-19T16:53:27.900,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 385 state to replica [ns_server:info,2014-08-19T16:53:27.900,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 384 state to replica [ns_server:info,2014-08-19T16:53:27.900,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 383 state to replica [ns_server:info,2014-08-19T16:53:27.900,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 382 state to replica [ns_server:info,2014-08-19T16:53:27.901,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 381 state to replica [ns_server:info,2014-08-19T16:53:27.901,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 380 state to replica [ns_server:info,2014-08-19T16:53:27.901,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 379 state to replica [ns_server:info,2014-08-19T16:53:27.901,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 378 state to replica [ns_server:info,2014-08-19T16:53:27.901,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 377 state to replica [ns_server:info,2014-08-19T16:53:27.902,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 376 state to replica [ns_server:info,2014-08-19T16:53:27.902,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 375 state to replica [ns_server:info,2014-08-19T16:53:27.902,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 374 state to replica [ns_server:info,2014-08-19T16:53:27.902,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 373 state to replica 
[ns_server:info,2014-08-19T16:53:27.903,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 372 state to replica [ns_server:info,2014-08-19T16:53:27.903,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 371 state to replica [ns_server:info,2014-08-19T16:53:27.903,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 370 state to replica [ns_server:info,2014-08-19T16:53:27.903,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 369 state to replica [ns_server:info,2014-08-19T16:53:27.904,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 368 state to replica [ns_server:info,2014-08-19T16:53:27.904,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 367 state to replica [ns_server:info,2014-08-19T16:53:27.904,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 366 state to replica [ns_server:info,2014-08-19T16:53:27.904,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 365 state to replica [ns_server:info,2014-08-19T16:53:27.905,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 364 state to replica [ns_server:info,2014-08-19T16:53:27.905,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 363 state to replica [ns_server:info,2014-08-19T16:53:27.905,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 362 state to replica [ns_server:info,2014-08-19T16:53:27.905,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 361 state to replica [ns_server:info,2014-08-19T16:53:27.906,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 360 state to replica [ns_server:info,2014-08-19T16:53:27.906,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 359 state to replica [ns_server:info,2014-08-19T16:53:27.906,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 358 state to replica [ns_server:info,2014-08-19T16:53:27.906,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 357 state to replica [ns_server:info,2014-08-19T16:53:27.907,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 356 state to replica [ns_server:info,2014-08-19T16:53:27.907,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 355 state to replica [ns_server:info,2014-08-19T16:53:27.907,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 354 state to replica [ns_server:info,2014-08-19T16:53:27.907,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 353 state to replica [ns_server:info,2014-08-19T16:53:27.908,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 352 state to replica [ns_server:info,2014-08-19T16:53:27.908,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 351 state to replica [ns_server:info,2014-08-19T16:53:27.908,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 350 state to replica [ns_server:info,2014-08-19T16:53:27.908,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 349 state to replica [ns_server:info,2014-08-19T16:53:27.909,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 348 state to replica 
[ns_server:info,2014-08-19T16:53:27.909,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 347 state to replica [ns_server:info,2014-08-19T16:53:27.909,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 346 state to replica [ns_server:info,2014-08-19T16:53:27.909,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 345 state to replica [ns_server:info,2014-08-19T16:53:27.910,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 344 state to replica [ns_server:info,2014-08-19T16:53:27.910,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 343 state to replica [ns_server:info,2014-08-19T16:53:27.910,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 342 state to replica [ns_server:info,2014-08-19T16:53:27.910,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 170 state to replica [ns_server:info,2014-08-19T16:53:27.911,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 169 state to replica [ns_server:info,2014-08-19T16:53:27.911,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 168 state to replica [ns_server:info,2014-08-19T16:53:27.911,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 167 state to replica [ns_server:info,2014-08-19T16:53:27.911,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 166 state to replica [ns_server:info,2014-08-19T16:53:27.912,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 165 state to replica [ns_server:info,2014-08-19T16:53:27.912,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 164 state to replica [ns_server:info,2014-08-19T16:53:27.912,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 163 state to replica [ns_server:info,2014-08-19T16:53:27.912,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 162 state to replica [ns_server:info,2014-08-19T16:53:27.913,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 161 state to replica [ns_server:info,2014-08-19T16:53:27.913,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 160 state to replica [ns_server:info,2014-08-19T16:53:27.913,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 159 state to replica [ns_server:info,2014-08-19T16:53:27.913,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 158 state to replica [ns_server:info,2014-08-19T16:53:27.914,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 157 state to replica [ns_server:info,2014-08-19T16:53:27.914,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 156 state to replica [ns_server:info,2014-08-19T16:53:27.914,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 155 state to replica [ns_server:info,2014-08-19T16:53:27.914,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 154 state to replica [ns_server:info,2014-08-19T16:53:27.914,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 153 state to replica [ns_server:info,2014-08-19T16:53:27.915,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 152 state to replica 
[ns_server:info,2014-08-19T16:53:27.915,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 151 state to replica [ns_server:info,2014-08-19T16:53:27.915,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 150 state to replica [ns_server:info,2014-08-19T16:53:27.915,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 149 state to replica [ns_server:info,2014-08-19T16:53:27.916,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 148 state to replica [ns_server:info,2014-08-19T16:53:27.916,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 147 state to replica [ns_server:info,2014-08-19T16:53:27.916,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 146 state to replica [ns_server:info,2014-08-19T16:53:27.916,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 145 state to replica [ns_server:info,2014-08-19T16:53:27.917,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 144 state to replica [ns_server:info,2014-08-19T16:53:27.917,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 143 state to replica [ns_server:info,2014-08-19T16:53:27.917,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 142 state to replica [ns_server:info,2014-08-19T16:53:27.917,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 141 state to replica [ns_server:info,2014-08-19T16:53:27.918,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 140 state to replica [ns_server:info,2014-08-19T16:53:27.918,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 139 state to replica [ns_server:info,2014-08-19T16:53:27.918,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 138 state to replica [ns_server:info,2014-08-19T16:53:27.918,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 137 state to replica [ns_server:info,2014-08-19T16:53:27.919,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 136 state to replica [ns_server:info,2014-08-19T16:53:27.919,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 135 state to replica [ns_server:info,2014-08-19T16:53:27.919,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 134 state to replica [ns_server:info,2014-08-19T16:53:27.919,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 133 state to replica [ns_server:info,2014-08-19T16:53:27.919,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 132 state to replica [ns_server:info,2014-08-19T16:53:27.920,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 131 state to replica [ns_server:info,2014-08-19T16:53:27.920,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 130 state to replica [ns_server:info,2014-08-19T16:53:27.920,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 129 state to replica [ns_server:info,2014-08-19T16:53:27.920,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 128 state to replica [ns_server:info,2014-08-19T16:53:27.921,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 127 state to replica 
[ns_server:info,2014-08-19T16:53:27.921,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 126 state to replica [ns_server:info,2014-08-19T16:53:27.921,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 125 state to replica [ns_server:info,2014-08-19T16:53:27.921,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 124 state to replica [ns_server:info,2014-08-19T16:53:27.922,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 123 state to replica [ns_server:info,2014-08-19T16:53:27.922,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 122 state to replica [ns_server:info,2014-08-19T16:53:27.922,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 121 state to replica [ns_server:info,2014-08-19T16:53:27.922,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 120 state to replica [ns_server:info,2014-08-19T16:53:27.923,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 119 state to replica [ns_server:info,2014-08-19T16:53:27.923,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 118 state to replica [ns_server:info,2014-08-19T16:53:27.923,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 117 state to replica [ns_server:info,2014-08-19T16:53:27.923,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 116 state to replica [ns_server:info,2014-08-19T16:53:27.924,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 115 state to replica [ns_server:info,2014-08-19T16:53:27.924,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 114 state to replica [ns_server:info,2014-08-19T16:53:27.924,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 113 state to replica [ns_server:info,2014-08-19T16:53:27.924,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 112 state to replica [ns_server:info,2014-08-19T16:53:27.925,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 111 state to replica [ns_server:info,2014-08-19T16:53:27.925,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 110 state to replica [ns_server:info,2014-08-19T16:53:27.925,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 109 state to replica [ns_server:info,2014-08-19T16:53:27.925,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 108 state to replica [ns_server:info,2014-08-19T16:53:27.925,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 107 state to replica [ns_server:info,2014-08-19T16:53:27.926,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 106 state to replica [ns_server:info,2014-08-19T16:53:27.926,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 105 state to replica [ns_server:info,2014-08-19T16:53:27.926,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 104 state to replica [ns_server:info,2014-08-19T16:53:27.926,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 103 state to replica [ns_server:info,2014-08-19T16:53:27.927,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 102 state to replica 
[ns_server:info,2014-08-19T16:53:27.927,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 101 state to replica [ns_server:info,2014-08-19T16:53:27.927,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 100 state to replica [ns_server:info,2014-08-19T16:53:27.927,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 99 state to replica [ns_server:info,2014-08-19T16:53:27.928,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 98 state to replica [ns_server:info,2014-08-19T16:53:27.928,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 97 state to replica [ns_server:info,2014-08-19T16:53:27.928,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 96 state to replica [ns_server:info,2014-08-19T16:53:27.928,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 95 state to replica [ns_server:info,2014-08-19T16:53:27.929,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 94 state to replica [ns_server:info,2014-08-19T16:53:27.929,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 93 state to replica [ns_server:info,2014-08-19T16:53:27.929,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 92 state to replica [ns_server:info,2014-08-19T16:53:27.929,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 91 state to replica [ns_server:info,2014-08-19T16:53:27.929,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 90 state to replica [ns_server:info,2014-08-19T16:53:27.930,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 89 state to replica [ns_server:info,2014-08-19T16:53:27.930,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 88 state to replica [ns_server:info,2014-08-19T16:53:27.930,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 87 state to replica [ns_server:info,2014-08-19T16:53:27.930,ns_1@10.242.238.90:<0.2226.1>:ns_memcached:do_handle_call:527]Changed vbucket 86 state to replica [ns_server:info,2014-08-19T16:53:27.943,ns_1@10.242.238.90:tap_replication_manager-maps_1_8_metahash<0.2209.1>:tap_replication_manager:start_child:172]Starting replication from 'ns_1@10.242.238.91' for [938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956, 957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] [ns_server:info,2014-08-19T16:53:27.944,ns_1@10.242.238.90:tap_replication_manager-maps_1_8_metahash<0.2209.1>:tap_replication_manager:start_child:172]Starting replication from 'ns_1@10.242.238.89' for [342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360, 361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379, 380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426] [error_logger:info,2014-08-19T16:53:27.944,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: 
{local,'ns_vbm_new_sup-maps_1_8_metahash'} started: [{pid,<0.2278.1>}, {name, {new_child_id, [938,939,940,941,942,943,944,945,946,947,948, 949,950,951,952,953,954,955,956,957,958,959, 960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets, #Fun}, {username,"maps_1_8_metahash"}, {password,get_from_config}, {vbuckets, [938,939,940,941,942,943,944,945,946,947, 948,949,950,951,952,953,954,955,956,957, 958,959,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:53:27.946,ns_1@10.242.238.90:tap_replication_manager-maps_1_8_metahash<0.2209.1>:tap_replication_manager:start_child:172]Starting replication from 'ns_1@10.242.238.88' for [86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107, 108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126, 127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145, 146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164, 165,166,167,168,169,170] [error_logger:info,2014-08-19T16:53:27.946,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-maps_1_8_metahash'} started: [{pid,<0.2279.1>}, {name, {new_child_id, [342,343,344,345,346,347,348,349,350,351,352, 353,354,355,356,357,358,359,360,361,362,363, 364,365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets, #Fun}, {username,"maps_1_8_metahash"}, {password,get_from_config}, {vbuckets, [342,343,344,345,346,347,348,349,350,351, 352,353,354,355,356,357,358,359,360,361, 362,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:27.947,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-maps_1_8_metahash'} started: [{pid,<0.2280.1>}, {name, {new_child_id, [86,87,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 
123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166, 167,168,169,170], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets, #Fun}, {username,"maps_1_8_metahash"}, {password,get_from_config}, {vbuckets, [86,87,88,89,90,91,92,93,94,95,96,97,98, 99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118, 119,120,121,122,123,124,125,126,127,128, 129,130,131,132,133,134,135,136,137,138, 139,140,141,142,143,144,145,146,147,148, 149,150,151,152,153,154,155,156,157,158, 159,160,161,162,163,164,165,166,167,168, 169,170]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:53:27.953,ns_1@10.242.238.90:ns_memcached-maps_1_8_metahash<0.2207.1>:ns_memcached:handle_call:247]Enabling traffic to bucket "maps_1_8_metahash" [ns_server:info,2014-08-19T16:53:27.953,ns_1@10.242.238.90:ns_memcached-maps_1_8_metahash<0.2207.1>:ns_memcached:handle_call:251]Bucket "maps_1_8_metahash" marked as warmed in 1 seconds [ns_server:debug,2014-08-19T16:53:27.983,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1022. Nacking mccouch update. [views:debug,2014-08-19T16:53:27.983,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1022. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:27.983,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1022,replica,0} [ns_server:debug,2014-08-19T16:53:27.983,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1022,1023] [ns_server:debug,2014-08-19T16:53:27.984,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:53:27.991,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169, 170]}, {checkpoints,[{86,0}, {87,0}, {88,0}, {89,0}, {90,0}, {91,0}, {92,0}, {93,0}, {94,0}, {95,0}, {96,0}, {97,0}, {98,0}, {99,0}, {100,0}, {101,0}, {102,0}, {103,0}, {104,0}, {105,0}, {106,0}, {107,0}, {108,0}, {109,0}, {110,0}, {111,0}, {112,0}, {113,0}, {114,0}, {115,0}, {116,0}, {117,0}, {118,0}, {119,0}, {120,0}, {121,0}, {122,0}, {123,0}, {124,0}, {125,0}, {126,0}, {127,0}, {128,0}, {129,0}, {130,0}, {131,0}, {132,0}, {133,0}, {134,0}, {135,0}, {136,0}, {137,0}, {138,0}, {139,0}, {140,0}, {141,0}, {142,0}, {143,0}, {144,0}, {145,0}, {146,0}, {147,0}, {148,0}, {149,0}, {150,0}, {151,0}, {152,0}, {153,0}, {154,0}, {155,0}, {156,0}, {157,0}, {158,0}, {159,0}, {160,0}, {161,0}, {162,0}, {163,0}, {164,0}, {165,0}, {166,0}, {167,0}, {168,0}, {169,0}, {170,0}]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, 
[{on_not_ready_vbuckets,#Fun}, {username,"maps_1_8_metahash"}, {password,get_from_config}, {vbuckets,[86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120, 121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136, 137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152, 153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168, 169,170]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]} [rebalance:debug,2014-08-19T16:53:27.993,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.2281.1> [rebalance:info,2014-08-19T16:53:27.994,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 86 [rebalance:info,2014-08-19T16:53:27.994,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 87 [rebalance:info,2014-08-19T16:53:27.995,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 88 [rebalance:info,2014-08-19T16:53:27.995,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 89 [rebalance:info,2014-08-19T16:53:27.995,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 90 [rebalance:info,2014-08-19T16:53:27.995,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 91 [rebalance:info,2014-08-19T16:53:27.995,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 92 [rebalance:info,2014-08-19T16:53:27.995,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 93 [rebalance:info,2014-08-19T16:53:27.996,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 94 [rebalance:info,2014-08-19T16:53:27.996,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 95 [rebalance:info,2014-08-19T16:53:27.996,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 96 [rebalance:info,2014-08-19T16:53:27.996,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 97 [rebalance:info,2014-08-19T16:53:27.996,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 98 [rebalance:info,2014-08-19T16:53:27.996,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 99 [rebalance:info,2014-08-19T16:53:27.996,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 100 [rebalance:info,2014-08-19T16:53:27.996,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 101 [rebalance:info,2014-08-19T16:53:27.996,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 102 [rebalance:info,2014-08-19T16:53:27.997,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 103 [rebalance:info,2014-08-19T16:53:27.997,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 104 [rebalance:info,2014-08-19T16:53:27.997,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 105 
[rebalance:info,2014-08-19T16:53:27.997,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 106 [rebalance:info,2014-08-19T16:53:27.997,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 107 [rebalance:info,2014-08-19T16:53:27.997,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 108 [rebalance:info,2014-08-19T16:53:27.997,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 109 [rebalance:info,2014-08-19T16:53:27.997,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 110 [rebalance:info,2014-08-19T16:53:27.997,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 111 [rebalance:info,2014-08-19T16:53:27.997,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 112 [rebalance:info,2014-08-19T16:53:27.997,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 113 [rebalance:info,2014-08-19T16:53:27.998,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 114 [rebalance:info,2014-08-19T16:53:27.998,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 115 [rebalance:info,2014-08-19T16:53:27.998,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 116 [rebalance:info,2014-08-19T16:53:27.998,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 117 [rebalance:info,2014-08-19T16:53:27.998,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 118 [rebalance:info,2014-08-19T16:53:27.998,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 119 [rebalance:info,2014-08-19T16:53:27.998,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 120 [rebalance:info,2014-08-19T16:53:27.998,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 121 [rebalance:info,2014-08-19T16:53:27.998,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 122 [rebalance:info,2014-08-19T16:53:27.998,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 123 [rebalance:info,2014-08-19T16:53:27.998,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 124 [rebalance:info,2014-08-19T16:53:27.999,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 125 [rebalance:info,2014-08-19T16:53:27.999,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 126 [rebalance:info,2014-08-19T16:53:27.999,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 127 [rebalance:info,2014-08-19T16:53:27.999,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 128 [rebalance:info,2014-08-19T16:53:27.999,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 129 [rebalance:info,2014-08-19T16:53:27.999,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial 
stream for vbucket 130 [rebalance:info,2014-08-19T16:53:27.999,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 131 [rebalance:info,2014-08-19T16:53:27.999,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 132 [rebalance:info,2014-08-19T16:53:27.999,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 133 [rebalance:info,2014-08-19T16:53:27.999,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 134 [rebalance:info,2014-08-19T16:53:28.000,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 135 [rebalance:info,2014-08-19T16:53:28.000,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 136 [rebalance:info,2014-08-19T16:53:28.000,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 137 [rebalance:info,2014-08-19T16:53:28.000,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 138 [rebalance:info,2014-08-19T16:53:28.000,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 139 [rebalance:info,2014-08-19T16:53:28.000,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 140 [rebalance:info,2014-08-19T16:53:28.001,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 141 [rebalance:info,2014-08-19T16:53:28.001,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 142 [rebalance:info,2014-08-19T16:53:28.001,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 143 [rebalance:info,2014-08-19T16:53:28.001,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 144 [rebalance:info,2014-08-19T16:53:28.001,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 145 [rebalance:info,2014-08-19T16:53:28.001,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 146 [rebalance:info,2014-08-19T16:53:28.002,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 147 [rebalance:info,2014-08-19T16:53:28.002,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 148 [rebalance:info,2014-08-19T16:53:28.002,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 149 [rebalance:info,2014-08-19T16:53:28.002,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 150 [rebalance:info,2014-08-19T16:53:28.002,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 151 [rebalance:info,2014-08-19T16:53:28.002,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 152 [rebalance:info,2014-08-19T16:53:28.003,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 153 [rebalance:info,2014-08-19T16:53:28.003,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 154 
[rebalance:info,2014-08-19T16:53:28.003,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 155 [rebalance:info,2014-08-19T16:53:28.003,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 156 [rebalance:info,2014-08-19T16:53:28.003,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 157 [rebalance:info,2014-08-19T16:53:28.003,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 158 [rebalance:info,2014-08-19T16:53:28.003,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 159 [rebalance:info,2014-08-19T16:53:28.003,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 160 [rebalance:info,2014-08-19T16:53:28.004,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 161 [rebalance:info,2014-08-19T16:53:28.004,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 162 [rebalance:info,2014-08-19T16:53:28.004,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 163 [rebalance:info,2014-08-19T16:53:28.004,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 164 [rebalance:info,2014-08-19T16:53:28.004,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 165 [rebalance:info,2014-08-19T16:53:28.004,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 166 [rebalance:info,2014-08-19T16:53:28.004,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 167 [rebalance:info,2014-08-19T16:53:28.004,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 168 [rebalance:info,2014-08-19T16:53:28.004,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 169 [rebalance:info,2014-08-19T16:53:28.004,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 170 [ns_server:debug,2014-08-19T16:53:28.015,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_ns_1@10.242.238.90 [views:debug,2014-08-19T16:53:28.017,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1022. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:28.017,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1022,replica,0} [ns_server:debug,2014-08-19T16:53:28.018,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:53:28.019,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953, 954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {checkpoints,[{938,0}, {939,0}, {940,0}, {941,0}, {942,0}, {943,0}, {944,0}, {945,0}, {946,0}, {947,0}, {948,0}, {949,0}, {950,0}, {951,0}, {952,0}, {953,0}, {954,0}, {955,0}, {956,0}, {957,0}, {958,0}, {959,0}, {960,0}, {961,0}, {962,0}, {963,0}, {964,0}, {965,0}, {966,0}, {967,0}, {968,0}, {969,0}, {970,0}, {971,0}, {972,0}, {973,0}, {974,0}, {975,0}, {976,0}, {977,0}, {978,0}, {979,0}, {980,0}, {981,0}, {982,0}, {983,0}, {984,0}, {985,0}, {986,0}, {987,0}, {988,0}, {989,0}, {990,0}, {991,0}, {992,0}, {993,0}, {994,0}, {995,0}, {996,0}, {997,0}, {998,0}, {999,0}, {1000,0}, {1001,0}, {1002,0}, {1003,0}, {1004,0}, {1005,0}, {1006,0}, {1007,0}, {1008,0}, {1009,0}, {1010,0}, {1011,0}, {1012,0}, {1013,0}, {1014,0}, {1015,0}, {1016,0}, {1017,0}, {1018,0}, {1019,0}, {1020,0}, {1021,0}, {1022,0}, {1023,0}]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] {{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets,#Fun}, {username,"maps_1_8_metahash"}, {password,get_from_config}, {vbuckets,[938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953, 954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]} [rebalance:debug,2014-08-19T16:53:28.020,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.2282.1> [rebalance:info,2014-08-19T16:53:28.021,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 938 [rebalance:info,2014-08-19T16:53:28.021,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 939 [rebalance:info,2014-08-19T16:53:28.021,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 940 [rebalance:info,2014-08-19T16:53:28.021,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 941 [rebalance:info,2014-08-19T16:53:28.021,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 942 [rebalance:info,2014-08-19T16:53:28.022,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 943 [rebalance:info,2014-08-19T16:53:28.022,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 944 
[rebalance:info,2014-08-19T16:53:28.022,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 945 [rebalance:info,2014-08-19T16:53:28.022,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 946 [rebalance:info,2014-08-19T16:53:28.022,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 947 [rebalance:info,2014-08-19T16:53:28.022,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 948 [rebalance:info,2014-08-19T16:53:28.021,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357, 358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {checkpoints,[{342,0}, {343,0}, {344,0}, {345,0}, {346,0}, {347,0}, {348,0}, {349,0}, {350,0}, {351,0}, {352,0}, {353,0}, {354,0}, {355,0}, {356,0}, {357,0}, {358,0}, {359,0}, {360,0}, {361,0}, {362,0}, {363,0}, {364,0}, {365,0}, {366,0}, {367,0}, {368,0}, {369,0}, {370,0}, {371,0}, {372,0}, {373,0}, {374,0}, {375,0}, {376,0}, {377,0}, {378,0}, {379,0}, {380,0}, {381,0}, {382,0}, {383,0}, {384,0}, {385,0}, {386,0}, {387,0}, {388,0}, {389,0}, {390,0}, {391,0}, {392,0}, {393,0}, {394,0}, {395,0}, {396,0}, {397,0}, {398,0}, {399,0}, {400,0}, {401,0}, {402,0}, {403,0}, {404,0}, {405,0}, {406,0}, {407,0}, {408,0}, {409,0}, {410,0}, {411,0}, {412,0}, {413,0}, {414,0}, {415,0}, {416,0}, {417,0}, {418,0}, {419,0}, {420,0}, {421,0}, {422,0}, {423,0}, {424,0}, {425,0}, {426,0}]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] {{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets,#Fun}, {username,"maps_1_8_metahash"}, {password,get_from_config}, {vbuckets,[342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357, 358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]} [rebalance:info,2014-08-19T16:53:28.022,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 949 [rebalance:info,2014-08-19T16:53:28.022,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 950 [rebalance:info,2014-08-19T16:53:28.022,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 951 [rebalance:info,2014-08-19T16:53:28.023,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 952 [rebalance:info,2014-08-19T16:53:28.023,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 953 [rebalance:info,2014-08-19T16:53:28.023,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 954 [rebalance:debug,2014-08-19T16:53:28.023,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.2283.1> 
[rebalance:info,2014-08-19T16:53:28.023,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 955 [rebalance:info,2014-08-19T16:53:28.023,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 956 [rebalance:info,2014-08-19T16:53:28.023,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 957 [rebalance:info,2014-08-19T16:53:28.023,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 958 [rebalance:info,2014-08-19T16:53:28.023,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 959 [rebalance:info,2014-08-19T16:53:28.023,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 960 [rebalance:info,2014-08-19T16:53:28.023,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 961 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 962 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 963 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 964 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 342 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 965 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 966 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 343 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 967 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 344 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 968 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 345 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 969 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 346 [rebalance:info,2014-08-19T16:53:28.024,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 347 [rebalance:info,2014-08-19T16:53:28.025,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 970 [rebalance:info,2014-08-19T16:53:28.025,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 348 [rebalance:info,2014-08-19T16:53:28.025,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 971 [rebalance:info,2014-08-19T16:53:28.025,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial 
stream for vbucket 349 [rebalance:info,2014-08-19T16:53:28.025,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 350 [rebalance:info,2014-08-19T16:53:28.025,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 972 [rebalance:info,2014-08-19T16:53:28.025,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 351 [rebalance:info,2014-08-19T16:53:28.025,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 973 [rebalance:info,2014-08-19T16:53:28.025,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 352 [rebalance:info,2014-08-19T16:53:28.025,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 974 [rebalance:info,2014-08-19T16:53:28.025,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 353 [rebalance:info,2014-08-19T16:53:28.025,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 975 [rebalance:info,2014-08-19T16:53:28.025,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 354 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 976 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 355 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 977 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 356 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 978 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 357 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 979 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 358 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 980 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 359 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 981 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 360 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 982 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 361 [rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 983 
[rebalance:info,2014-08-19T16:53:28.026,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 362 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 984 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 363 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 985 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 364 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 986 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 365 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 987 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 366 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 988 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 367 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 989 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 368 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 990 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 369 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 991 [rebalance:info,2014-08-19T16:53:28.027,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 370 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 371 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 992 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 372 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 993 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 373 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 374 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 994 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial 
stream for vbucket 375 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 995 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 376 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 996 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 377 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 997 [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 378 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 998 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 379 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 380 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 999 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 381 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1000 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 382 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 383 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1001 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 384 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1002 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 385 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1003 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 386 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 387 [rebalance:info,2014-08-19T16:53:28.029,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1004 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 388 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 389 
[rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 390 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1005 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 391 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1006 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 392 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 393 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1007 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 394 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 395 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1008 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 396 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 397 [rebalance:info,2014-08-19T16:53:28.030,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1009 [rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 398 [rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1010 [rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 399 [rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1011 [rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 400 [rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 401 [rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1012 [rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 402 [rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1013 [rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 403 [rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1014 
[rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 404 [rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1015 [rebalance:info,2014-08-19T16:53:28.031,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 405 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 406 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1016 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 407 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1017 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 408 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1018 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 409 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1019 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 410 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1020 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 411 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1021 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 412 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1022 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 413 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1023 [rebalance:info,2014-08-19T16:53:28.032,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 414 [rebalance:info,2014-08-19T16:53:28.033,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 415 [rebalance:info,2014-08-19T16:53:28.033,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 416 [rebalance:info,2014-08-19T16:53:28.033,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 417 [rebalance:info,2014-08-19T16:53:28.033,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 418 
[rebalance:info,2014-08-19T16:53:28.033,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 419 [rebalance:info,2014-08-19T16:53:28.033,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 420 [rebalance:info,2014-08-19T16:53:28.033,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 421 [rebalance:info,2014-08-19T16:53:28.033,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 422 [rebalance:info,2014-08-19T16:53:28.034,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 423 [rebalance:info,2014-08-19T16:53:28.034,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 424 [rebalance:info,2014-08-19T16:53:28.034,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 425 [rebalance:info,2014-08-19T16:53:28.034,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 426 [ns_server:debug,2014-08-19T16:53:28.036,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.036,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.036,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
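The burst of rebalance records above shows two ebucketmigrator_srv workers (<0.2278.1> and <0.2279.1>) opening initial upstream streams for their vbucket ranges before the backfill-close messages begin. A minimal Python sketch for tallying these records offline, assuming only the record layout visible in this log (the regex and helper name are illustrative, not part of ns_server):

import re
from collections import defaultdict

# Record layout assumed from the lines above, e.g.
# [rebalance:info,2014-08-19T16:53:28.028,ns_1@10.242.238.90:<0.2278.1>:
#   ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 995
STREAM_RE = re.compile(
    r"\[rebalance:info,(?P<ts>[^,]+),[^:]+:<(?P<pid>[\d.]+)>:"
    r"ebucketmigrator_srv:process_upstream:\d+\]"
    r"Initial stream for vbucket (?P<vb>\d+)")

def tally_initial_streams(log_text):
    """Map each migrator pid to the vbuckets it opened initial streams for."""
    streams = defaultdict(list)
    for match in STREAM_RE.finditer(log_text):
        streams[match.group("pid")].append(int(match.group("vb")))
    return dict(streams)

# Feeding the records above would show <0.2278.1> covering roughly
# vbuckets 995-1023 and <0.2279.1> covering roughly 375-426.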
[ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.037,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.038,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.039,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.040,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.041,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.042,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.043,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2278.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.044,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.045,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.045,ns_1@10.242.238.90:<0.2279.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.046,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.046,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.046,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.047,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.047,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.047,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.047,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.047,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.047,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.047,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.048,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.048,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.048,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.048,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.048,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.048,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.048,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.048,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.049,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.049,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.049,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:53:28.049,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.049,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.049,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.049,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.049,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.049,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.049,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.049,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.050,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.050,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.050,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.050,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.050,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.050,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.050,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.050,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.050,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.050,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.050,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.051,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.052,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.052,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.052,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.052,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.052,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.052,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.052,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.052,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.052,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.052,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.052,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.052,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.052,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.053,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.053,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.053,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.053,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
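The long run of near-identical "seen backfill-close message" records is easier to read when collapsed into per-worker counts. A hedged sketch of such a counting pass (purely illustrative; the grouping key is an assumption about how one might summarise this log, not anything ns_server does itself):

import re
from collections import Counter

BACKFILL_CLOSE_RE = re.compile(
    r"ns_1@[\d.]+:<(?P<pid>[\d.]+)>:ebucketmigrator_srv:process_upstream:\d+\]"
    r"seen backfill-close message")

def count_backfill_close(log_text):
    """Count the repeated backfill-close records per migrator pid."""
    return Counter(m.group("pid") for m in BACKFILL_CLOSE_RE.finditer(log_text))

# For this section the counter would report a few hundred closes split across
# <0.2278.1>, <0.2279.1> and <0.2280.1>, which is easier to scan than the raw
# repetition above.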
[ns_server:debug,2014-08-19T16:53:28.053,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.053,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.053,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.053,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.053,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.053,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.053,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.053,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.053,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.054,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.054,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.054,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.054,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.054,ns_1@10.242.238.90:<0.2280.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:28.175,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1020. Nacking mccouch update. [views:debug,2014-08-19T16:53:28.175,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1020. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:28.175,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1020,replica,0} [ns_server:debug,2014-08-19T16:53:28.175,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1022,1020,1023] [views:debug,2014-08-19T16:53:28.259,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1020. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:28.259,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1020,replica,0} [ns_server:info,2014-08-19T16:53:28.340,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.90': ["maps_1_8_metahash"] [ns_server:debug,2014-08-19T16:53:28.418,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1018. Nacking mccouch update. 
[views:debug,2014-08-19T16:53:28.418,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1018. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:28.418,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1018,replica,0} [ns_server:debug,2014-08-19T16:53:28.418,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1022,1018,1020,1023] [views:debug,2014-08-19T16:53:28.494,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1018. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:28.494,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1018,replica,0} [ns_server:debug,2014-08-19T16:53:28.635,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1016. Nacking mccouch update. [views:debug,2014-08-19T16:53:28.635,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1016. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:28.635,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1016,replica,0} [ns_server:debug,2014-08-19T16:53:28.635,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1018,1020,1023] [views:debug,2014-08-19T16:53:28.711,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1016. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:28.711,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1016,replica,0} [ns_server:debug,2014-08-19T16:53:28.853,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1014. Nacking mccouch update. [views:debug,2014-08-19T16:53:28.853,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1014. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:28.853,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1014,replica,0} [ns_server:debug,2014-08-19T16:53:28.853,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1018,1014,1020,1023] [views:debug,2014-08-19T16:53:28.920,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1014. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:28.920,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1014,replica,0} [ns_server:debug,2014-08-19T16:53:29.007,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1012. Nacking mccouch update. 
[views:debug,2014-08-19T16:53:29.007,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1012. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:29.007,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1012,replica,0} [ns_server:debug,2014-08-19T16:53:29.007,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1012,1018,1014,1020,1023] [views:debug,2014-08-19T16:53:29.050,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1012. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:29.050,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1012,replica,0} [ns_server:debug,2014-08-19T16:53:29.125,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1010. Nacking mccouch update. [views:debug,2014-08-19T16:53:29.125,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1010. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:29.125,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1012,1018,1014,1020,1023,1010] [ns_server:debug,2014-08-19T16:53:29.125,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1010,replica,0} [views:debug,2014-08-19T16:53:29.159,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1010. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:29.159,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1010,replica,0} [ns_server:debug,2014-08-19T16:53:29.300,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1008. Nacking mccouch update. [views:debug,2014-08-19T16:53:29.301,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1008. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:29.301,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1012,1018,1008,1014,1020,1023,1010] [ns_server:debug,2014-08-19T16:53:29.301,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1008,replica,0} [views:debug,2014-08-19T16:53:29.343,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1008. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:29.343,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1008,replica,0} [ns_server:debug,2014-08-19T16:53:29.435,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1006. Nacking mccouch update. 
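From here on the log interleaves mc_connection notifications ("Added _local/vbuuid document into vb: N. Nacking mccouch update.") with capi_set_view_manager events reporting each vbucket switching to replica state, plus the growing "Usable vbuckets" list. A small sketch that reconstructs that replica set from the set_vbucket records, under the same layout assumptions as the sketches above (the helper name is illustrative):

import re

SET_VB_RE = re.compile(
    r"Got set_vbucket event for (?P<bucket>\w+)/(?P<vb>\d+)\.\s*"
    r"Updated state: (?P<state>\w+)")

def replica_vbuckets(log_text, bucket="maps_1_8_metahash"):
    """Sorted set of vbuckets reported as replica for the given bucket."""
    vbs = {int(m.group("vb"))
           for m in SET_VB_RE.finditer(log_text)
           if m.group("bucket") == bucket and m.group("state") == "replica"}
    return sorted(vbs)

# The result should line up with the "Usable vbuckets: [...]" lists that
# capi_set_view_manager prints as replicas 1023, 1022, 1020, 1018, ... appear.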
[views:debug,2014-08-19T16:53:29.435,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1006. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:29.435,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1006,1012,1018,1008,1014,1020,1023,1010] [ns_server:debug,2014-08-19T16:53:29.435,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1006,replica,0} [views:debug,2014-08-19T16:53:29.469,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1006. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:29.469,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1006,replica,0} [ns_server:debug,2014-08-19T16:53:29.570,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1004. Nacking mccouch update. [views:debug,2014-08-19T16:53:29.570,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1004. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:29.570,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1006,1012,1018,1008,1014,1020,1004,1023,1010] [ns_server:debug,2014-08-19T16:53:29.570,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1004,replica,0} [views:debug,2014-08-19T16:53:29.637,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1004. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:29.637,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1004,replica,0} [ns_server:info,2014-08-19T16:53:29.769,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.89': ["maps_1_8_metahash"] [ns_server:debug,2014-08-19T16:53:29.795,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1002. Nacking mccouch update. [views:debug,2014-08-19T16:53:29.796,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1002. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:29.796,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1002,replica,0} [ns_server:debug,2014-08-19T16:53:29.796,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1006,1012,1018,1002,1008,1014,1020,1004,1023,1010] [views:debug,2014-08-19T16:53:29.863,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1002. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:29.863,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1002,replica,0} [ns_server:debug,2014-08-19T16:53:30.013,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1000. Nacking mccouch update. [views:debug,2014-08-19T16:53:30.013,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1000. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.013,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,1012,1018,1002,1008,1014,1020,1004,1023,1010] [ns_server:debug,2014-08-19T16:53:30.013,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1000,replica,0} [views:debug,2014-08-19T16:53:30.080,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1000. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.081,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1000,replica,0} [ns_server:debug,2014-08-19T16:53:30.205,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 998. Nacking mccouch update. [views:debug,2014-08-19T16:53:30.206,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/998. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.206,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",998,replica,0} [ns_server:debug,2014-08-19T16:53:30.206,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,1012,1018,1002,1008,998,1014,1020,1004,1023,1010] [views:debug,2014-08-19T16:53:30.273,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/998. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.273,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",998,replica,0} [ns_server:debug,2014-08-19T16:53:30.423,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 996. Nacking mccouch update. [views:debug,2014-08-19T16:53:30.423,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/996. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.423,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",996,replica,0} [ns_server:debug,2014-08-19T16:53:30.423,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,996,1012,1018,1002,1008,998,1014,1020,1004,1023,1010] [views:debug,2014-08-19T16:53:30.477,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/996. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.478,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",996,replica,0} [ns_server:debug,2014-08-19T16:53:30.561,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 994. Nacking mccouch update. [views:debug,2014-08-19T16:53:30.561,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/994. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.561,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,996,1012,1018,1002,1008,998,1014,1020,1004,1023,994,1010] [ns_server:debug,2014-08-19T16:53:30.561,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",994,replica,0} [views:debug,2014-08-19T16:53:30.595,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/994. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.595,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",994,replica,0} [ns_server:debug,2014-08-19T16:53:30.662,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 992. Nacking mccouch update. [views:debug,2014-08-19T16:53:30.662,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/992. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.662,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",992,replica,0} [ns_server:debug,2014-08-19T16:53:30.662,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,996,1012,1018,1002,992,1008,998,1014,1020,1004,1023,994, 1010] [views:debug,2014-08-19T16:53:30.696,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/992. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.696,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",992,replica,0} [ns_server:debug,2014-08-19T16:53:30.763,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 990. Nacking mccouch update. 
[views:debug,2014-08-19T16:53:30.763,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/990. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.763,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",990,replica,0} [ns_server:debug,2014-08-19T16:53:30.763,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,990,1022,1006,996,1012,1018,1002,992,1008,998,1014,1020,1004,1023, 994,1010] [views:debug,2014-08-19T16:53:30.797,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/990. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.798,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",990,replica,0} [ns_server:debug,2014-08-19T16:53:30.864,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 988. Nacking mccouch update. [views:debug,2014-08-19T16:53:30.864,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/988. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.864,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",988,replica,0} [ns_server:debug,2014-08-19T16:53:30.864,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,990,1022,1006,996,1012,1018,1002,992,1008,998,1014,988,1020,1004, 1023,994,1010] [views:debug,2014-08-19T16:53:30.898,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/988. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:30.898,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",988,replica,0} [ns_server:debug,2014-08-19T16:53:31.058,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 986. Nacking mccouch update. [views:debug,2014-08-19T16:53:31.058,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/986. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:31.058,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",986,replica,0} [ns_server:debug,2014-08-19T16:53:31.058,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,990,1022,1006,996,1012,986,1018,1002,992,1008,998,1014,988,1020, 1004,1023,994,1010] [views:debug,2014-08-19T16:53:31.143,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/986. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:31.143,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",986,replica,0} [ns_server:info,2014-08-19T16:53:31.289,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.88': ["maps_1_8_metahash"] [ns_server:debug,2014-08-19T16:53:31.292,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 984. Nacking mccouch update. [views:debug,2014-08-19T16:53:31.292,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/984. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:31.292,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",984,replica,0} [ns_server:debug,2014-08-19T16:53:31.293,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,1012,986,1018,1002,992,1008,998,1014,988, 1020,1004,1023,994,1010] [views:debug,2014-08-19T16:53:31.368,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/984. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:31.368,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",984,replica,0} [ns_server:info,2014-08-19T16:53:31.439,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.91': ["maps_1_8_metahash"] [ns_server:debug,2014-08-19T16:53:31.518,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 982. Nacking mccouch update. [views:debug,2014-08-19T16:53:31.518,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/982. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:31.518,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",982,replica,0} [ns_server:debug,2014-08-19T16:53:31.519,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,1012,986,1018,1002,992,1008,998,982,1014,988, 1020,1004,1023,994,1010] [views:debug,2014-08-19T16:53:31.602,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/982. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:31.602,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",982,replica,0} [ns_server:debug,2014-08-19T16:53:31.702,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 980. Nacking mccouch update. [views:debug,2014-08-19T16:53:31.702,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/980. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:31.702,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",980,replica,0} [ns_server:debug,2014-08-19T16:53:31.703,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,980,1012,986,1018,1002,992,1008,998,982,1014, 988,1020,1004,1023,994,1010] [views:debug,2014-08-19T16:53:31.770,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/980. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:31.770,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",980,replica,0} [ns_server:debug,2014-08-19T16:53:31.873,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 978. Nacking mccouch update. [views:debug,2014-08-19T16:53:31.873,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/978. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:31.873,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,980,1012,986,1018,1002,992,1008,998,982,1014, 988,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:53:31.873,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",978,replica,0} [views:debug,2014-08-19T16:53:31.915,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/978. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:31.915,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",978,replica,0} [ns_server:debug,2014-08-19T16:53:31.991,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 976. Nacking mccouch update. [views:debug,2014-08-19T16:53:31.991,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/976. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:31.992,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,980,1012,986,1018,1002,992,976,1008,998,982, 1014,988,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:53:31.992,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",976,replica,0} [views:debug,2014-08-19T16:53:32.034,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/976. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.034,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",976,replica,0} [ns_server:debug,2014-08-19T16:53:32.101,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 974. 
Nacking mccouch update. [views:debug,2014-08-19T16:53:32.101,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/974. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.101,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,974,1022,1006,996,980,1012,986,1018,1002,992,976,1008,998, 982,1014,988,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:53:32.101,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",974,replica,0} [views:debug,2014-08-19T16:53:32.135,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/974. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.135,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",974,replica,0} [ns_server:debug,2014-08-19T16:53:32.201,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 972. Nacking mccouch update. [views:debug,2014-08-19T16:53:32.201,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/972. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.202,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,974,1022,1006,996,980,1012,986,1018,1002,992,976,1008,998, 982,1014,988,972,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:53:32.202,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",972,replica,0} [views:debug,2014-08-19T16:53:32.252,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/972. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.252,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",972,replica,0} [ns_server:debug,2014-08-19T16:53:32.328,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 970. Nacking mccouch update. [views:debug,2014-08-19T16:53:32.328,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/970. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.328,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,974,1022,1006,996,980,1012,986,970,1018,1002,992,976,1008, 998,982,1014,988,972,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:53:32.328,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",970,replica,0} [views:debug,2014-08-19T16:53:32.378,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/970. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.379,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",970,replica,0} [ns_server:debug,2014-08-19T16:53:32.445,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 968. Nacking mccouch update. [views:debug,2014-08-19T16:53:32.446,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/968. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.446,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,1012,986,970,1018,1002,992,976, 1008,998,982,1014,988,972,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:53:32.446,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",968,replica,0} [views:debug,2014-08-19T16:53:32.479,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/968. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.479,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",968,replica,0} [ns_server:debug,2014-08-19T16:53:32.563,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 966. Nacking mccouch update. [views:debug,2014-08-19T16:53:32.563,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/966. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.563,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,1012,986,970,1018,1002,992,976, 1008,998,982,966,1014,988,972,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:53:32.563,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",966,replica,0} [views:debug,2014-08-19T16:53:32.613,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/966. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.614,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",966,replica,0} [ns_server:debug,2014-08-19T16:53:32.764,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 964. Nacking mccouch update. [views:debug,2014-08-19T16:53:32.764,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/964. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.764,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,964,1012,986,970,1018,1002,992, 976,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:53:32.764,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",964,replica,0} [views:debug,2014-08-19T16:53:32.814,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/964. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.815,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",964,replica,0} [ns_server:debug,2014-08-19T16:53:32.964,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 962. Nacking mccouch update. [views:debug,2014-08-19T16:53:32.965,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/962. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:32.965,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",962,replica,0} [ns_server:debug,2014-08-19T16:53:32.965,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,964,1012,986,970,1018,1002,992, 976,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,962,1010] [views:debug,2014-08-19T16:53:33.027,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/962. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:33.028,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",962,replica,0} [ns_server:debug,2014-08-19T16:53:33.120,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 960. Nacking mccouch update. [views:debug,2014-08-19T16:53:33.120,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/960. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:33.120,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",960,replica,0} [ns_server:debug,2014-08-19T16:53:33.120,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,964,1012,986,970,1018,1002,992, 976,960,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,962,1010] [views:debug,2014-08-19T16:53:33.179,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/960. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:33.179,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",960,replica,0} [ns_server:debug,2014-08-19T16:53:33.270,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 958. Nacking mccouch update. [views:debug,2014-08-19T16:53:33.271,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/958. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:33.271,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",958,replica,0} [ns_server:debug,2014-08-19T16:53:33.271,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,1018,1002, 992,976,960,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,962,1010] [views:debug,2014-08-19T16:53:33.329,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/958. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:33.330,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",958,replica,0} [ns_server:debug,2014-08-19T16:53:33.396,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 956. Nacking mccouch update. [views:debug,2014-08-19T16:53:33.396,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/956. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:33.396,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",956,replica,0} [ns_server:debug,2014-08-19T16:53:33.396,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,1018,1002, 992,976,960,1008,998,982,966,1014,988,972,956,1020,1004,1023,994,978,962, 1010] [views:debug,2014-08-19T16:53:33.430,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/956. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:33.430,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",956,replica,0} [ns_server:debug,2014-08-19T16:53:33.539,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 954. Nacking mccouch update. [views:debug,2014-08-19T16:53:33.539,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/954. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:33.539,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",954,replica,0} [ns_server:debug,2014-08-19T16:53:33.539,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,954,1018, 1002,992,976,960,1008,998,982,966,1014,988,972,956,1020,1004,1023,994,978, 962,1010] [views:debug,2014-08-19T16:53:33.598,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/954. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:33.598,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",954,replica,0} [ns_server:debug,2014-08-19T16:53:33.740,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 952. Nacking mccouch update. [views:debug,2014-08-19T16:53:33.740,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/952. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:33.740,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",952,replica,0} [ns_server:debug,2014-08-19T16:53:33.740,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,1014,988,972,956,1020,1004,1023,994, 978,962,1010] [views:debug,2014-08-19T16:53:33.816,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/952. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:33.816,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",952,replica,0} [ns_server:debug,2014-08-19T16:53:33.959,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 950. Nacking mccouch update. [views:debug,2014-08-19T16:53:33.959,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/950. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:33.959,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",950,replica,0} [ns_server:debug,2014-08-19T16:53:33.959,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,950,1014,988,972,956,1020,1004,1023, 994,978,962,1010] [views:debug,2014-08-19T16:53:34.043,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/950. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:34.043,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",950,replica,0} [ns_server:debug,2014-08-19T16:53:34.185,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 948. Nacking mccouch update. [views:debug,2014-08-19T16:53:34.185,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/948. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:34.185,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",948,replica,0} [ns_server:debug,2014-08-19T16:53:34.186,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,948,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,950,1014,988,972,956,1020,1004,1023, 994,978,962,1010] [views:debug,2014-08-19T16:53:34.244,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/948. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:34.244,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",948,replica,0} [ns_server:debug,2014-08-19T16:53:34.402,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 946. Nacking mccouch update. [views:debug,2014-08-19T16:53:34.402,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/946. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:34.403,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",946,replica,0} [ns_server:debug,2014-08-19T16:53:34.403,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,948,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,950,1014,988,972,956,1020,1004,1023, 994,978,962,946,1010] [views:debug,2014-08-19T16:53:34.470,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/946. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:34.470,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",946,replica,0} [ns_server:debug,2014-08-19T16:53:34.623,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 944. Nacking mccouch update. [views:debug,2014-08-19T16:53:34.623,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/944. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:34.623,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",944,replica,0} [ns_server:debug,2014-08-19T16:53:34.623,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,948,1012,986,970,954, 1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,1020,1004, 1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:34.682,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/944. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:34.682,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",944,replica,0} [ns_server:debug,2014-08-19T16:53:34.815,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 942. Nacking mccouch update. [views:debug,2014-08-19T16:53:34.816,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/942. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:34.816,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",942,replica,0} [ns_server:debug,2014-08-19T16:53:34.816,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986,970, 954,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,1020, 1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:34.866,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/942. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:34.866,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",942,replica,0} [ns_server:debug,2014-08-19T16:53:35.017,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 940. Nacking mccouch update. [views:debug,2014-08-19T16:53:35.018,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/940. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:35.018,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",940,replica,0} [ns_server:debug,2014-08-19T16:53:35.018,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986,970, 954,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,940,1020, 1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:35.068,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/940. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:35.068,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",940,replica,0} [ns_server:debug,2014-08-19T16:53:35.210,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 938. Nacking mccouch update. [views:debug,2014-08-19T16:53:35.210,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/938. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:35.210,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",938,replica,0} [ns_server:debug,2014-08-19T16:53:35.211,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986,970, 954,938,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,940, 1020,1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:35.278,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/938. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:35.278,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",938,replica,0} [ns_server:debug,2014-08-19T16:53:35.411,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 766. Nacking mccouch update. [views:debug,2014-08-19T16:53:35.411,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/766. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:35.412,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",766,active,0} [ns_server:debug,2014-08-19T16:53:35.412,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986, 970,954,938,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956, 940,1020,1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:35.462,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/766. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:35.462,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",766,active,0} [ns_server:debug,2014-08-19T16:53:35.546,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 764. Nacking mccouch update. [views:debug,2014-08-19T16:53:35.546,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/764. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:35.546,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",764,active,0} [ns_server:debug,2014-08-19T16:53:35.546,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986, 970,954,938,1018,1002,992,976,960,944,1008,764,998,982,966,950,1014,988,972, 956,940,1020,1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:35.597,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/764. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:35.597,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",764,active,0} [ns_server:debug,2014-08-19T16:53:35.663,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 762. Nacking mccouch update. [views:debug,2014-08-19T16:53:35.663,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/762. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:35.663,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",762,active,0} [ns_server:debug,2014-08-19T16:53:35.663,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,762,996,980,964,948,1012, 986,970,954,938,1018,1002,992,976,960,944,1008,764,998,982,966,950,1014,988, 972,956,940,1020,1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:35.698,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/762. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:35.698,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",762,active,0} [ns_server:debug,2014-08-19T16:53:35.781,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 760. Nacking mccouch update. [views:debug,2014-08-19T16:53:35.781,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/760. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:35.781,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",760,active,0} [ns_server:debug,2014-08-19T16:53:35.781,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,762,996,980,964,948,1012, 986,970,954,938,1018,1002,992,976,960,944,1008,764,998,982,966,950,1014,988, 972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:35.832,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/760. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:35.832,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",760,active,0} [ns_server:debug,2014-08-19T16:53:35.911,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 758. Nacking mccouch update. [views:debug,2014-08-19T16:53:35.911,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/758. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:35.911,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",758,active,0} [ns_server:debug,2014-08-19T16:53:35.911,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,762,996,980,964,948,1012, 986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982,966,950,1014, 988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:35.970,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/758. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:35.970,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",758,active,0} [ns_server:debug,2014-08-19T16:53:36.111,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 756. Nacking mccouch update. [views:debug,2014-08-19T16:53:36.112,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/756. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.112,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",756,active,0} [ns_server:debug,2014-08-19T16:53:36.112,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964,948, 1012,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982,966,950, 1014,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:36.145,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/756. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.145,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",756,active,0} [ns_server:debug,2014-08-19T16:53:36.212,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 754. Nacking mccouch update. [views:debug,2014-08-19T16:53:36.212,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/754. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.212,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",754,active,0} [ns_server:debug,2014-08-19T16:53:36.213,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964,948, 1012,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982,966,950, 1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:36.246,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/754. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.246,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",754,active,0} [ns_server:debug,2014-08-19T16:53:36.338,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 752. Nacking mccouch update. [views:debug,2014-08-19T16:53:36.338,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/752. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.339,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",752,active,0} [ns_server:debug,2014-08-19T16:53:36.339,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964,948, 1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982,966, 950,1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:36.397,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/752. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.398,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",752,active,0} [ns_server:debug,2014-08-19T16:53:36.489,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 750. Nacking mccouch update. [views:debug,2014-08-19T16:53:36.489,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/750. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.489,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",750,active,0} [ns_server:debug,2014-08-19T16:53:36.489,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964, 948,1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982, 966,950,1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:36.573,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/750. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.574,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",750,active,0} [ns_server:debug,2014-08-19T16:53:36.665,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 748. Nacking mccouch update. [views:debug,2014-08-19T16:53:36.665,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/748. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.666,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",748,active,0} [ns_server:debug,2014-08-19T16:53:36.666,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964, 948,1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,748,998, 982,966,950,1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:36.724,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/748. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.724,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",748,active,0} [ns_server:debug,2014-08-19T16:53:36.791,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 746. Nacking mccouch update. [views:debug,2014-08-19T16:53:36.791,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/746. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.791,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",746,active,0} [ns_server:debug,2014-08-19T16:53:36.792,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,746,996,980, 964,948,1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,748, 998,982,966,950,1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946, 1010] [views:debug,2014-08-19T16:53:36.826,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/746. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.826,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",746,active,0} [ns_server:debug,2014-08-19T16:53:36.918,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 744. Nacking mccouch update. [views:debug,2014-08-19T16:53:36.918,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/744. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.918,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",744,active,0} [ns_server:debug,2014-08-19T16:53:36.919,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,746,996,980, 964,948,1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,748, 998,982,966,950,1014,754,988,972,956,940,1020,1004,760,744,1023,994,978,962, 946,1010] [views:debug,2014-08-19T16:53:36.978,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/744. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:36.978,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",744,active,0} [ns_server:debug,2014-08-19T16:53:37.044,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 742. Nacking mccouch update. [views:debug,2014-08-19T16:53:37.044,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/742. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.044,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",742,active,0} [ns_server:debug,2014-08-19T16:53:37.044,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,746,996,980, 964,948,1012,752,986,970,954,938,1018,1002,758,742,992,976,960,944,1008,764, 748,998,982,966,950,1014,754,988,972,956,940,1020,1004,760,744,1023,994,978, 962,946,1010] [views:debug,2014-08-19T16:53:37.078,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/742. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.078,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",742,active,0} [ns_server:debug,2014-08-19T16:53:37.156,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 740. Nacking mccouch update. [views:debug,2014-08-19T16:53:37.156,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/740. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.157,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",740,active,0} [ns_server:debug,2014-08-19T16:53:37.157,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746,996, 980,964,948,1012,752,986,970,954,938,1018,1002,758,742,992,976,960,944,1008, 764,748,998,982,966,950,1014,754,988,972,956,940,1020,1004,760,744,1023,994, 978,962,946,1010] [views:debug,2014-08-19T16:53:37.191,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/740. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.191,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",740,active,0} [ns_server:debug,2014-08-19T16:53:37.257,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 738. Nacking mccouch update. [views:debug,2014-08-19T16:53:37.257,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/738. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.257,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",738,active,0} [ns_server:debug,2014-08-19T16:53:37.258,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746,996, 980,964,948,1012,752,986,970,954,938,1018,1002,758,742,992,976,960,944,1008, 764,748,998,982,966,950,1014,754,738,988,972,956,940,1020,1004,760,744,1023, 994,978,962,946,1010] [views:debug,2014-08-19T16:53:37.291,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/738. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.291,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",738,active,0} [ns_server:debug,2014-08-19T16:53:37.358,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 736. Nacking mccouch update. [views:debug,2014-08-19T16:53:37.358,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/736. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.358,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",736,active,0} [ns_server:debug,2014-08-19T16:53:37.358,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746,996, 980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976,960,944, 1008,764,748,998,982,966,950,1014,754,738,988,972,956,940,1020,1004,760,744, 1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:37.392,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/736. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.392,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",736,active,0} [ns_server:debug,2014-08-19T16:53:37.477,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 734. Nacking mccouch update. [views:debug,2014-08-19T16:53:37.477,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/734. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.477,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",734,active,0} [ns_server:debug,2014-08-19T16:53:37.477,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976,960, 944,1008,764,748,998,982,966,950,1014,754,738,988,972,956,940,1020,1004,760, 744,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:37.528,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/734. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.528,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",734,active,0} [ns_server:debug,2014-08-19T16:53:37.661,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 732. Nacking mccouch update. [views:debug,2014-08-19T16:53:37.661,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/732. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.661,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",732,active,0} [ns_server:debug,2014-08-19T16:53:37.662,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976,960, 944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956,940,1020,1004, 760,744,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:37.746,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/732. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.746,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",732,active,0} [ns_server:debug,2014-08-19T16:53:37.896,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 730. Nacking mccouch update. [views:debug,2014-08-19T16:53:37.896,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/730. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.896,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",730,active,0} [ns_server:debug,2014-08-19T16:53:37.896,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976, 960,944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956,940,1020, 1004,760,744,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:37.946,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/730. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:37.946,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",730,active,0} [ns_server:debug,2014-08-19T16:53:38.088,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 728. Nacking mccouch update. [views:debug,2014-08-19T16:53:38.088,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/728. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:38.088,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",728,active,0} [ns_server:debug,2014-08-19T16:53:38.088,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976, 960,944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956,940,1020, 1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:38.155,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/728. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:38.155,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",728,active,0} [ns_server:debug,2014-08-19T16:53:38.297,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 726. Nacking mccouch update. [views:debug,2014-08-19T16:53:38.297,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/726. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:38.298,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",726,active,0} [ns_server:debug,2014-08-19T16:53:38.298,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,726,992, 976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956,940, 1020,1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:38.348,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/726. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:38.348,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",726,active,0} [ns_server:debug,2014-08-19T16:53:38.427,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 724. Nacking mccouch update. [views:debug,2014-08-19T16:53:38.428,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/724. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:38.428,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",724,active,0} [ns_server:debug,2014-08-19T16:53:38.428,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006,762, 746,730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,726, 992,976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956, 940,1020,1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:38.486,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/724. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:38.486,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",724,active,0} [ns_server:debug,2014-08-19T16:53:38.637,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 722. Nacking mccouch update. [views:debug,2014-08-19T16:53:38.637,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/722. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:38.640,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",722,active,0} [ns_server:debug,2014-08-19T16:53:38.640,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006,762, 746,730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,726, 992,976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,722,988,972, 956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:38.695,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/722. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:38.696,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",722,active,0} [ns_server:debug,2014-08-19T16:53:38.846,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 720. Nacking mccouch update. [views:debug,2014-08-19T16:53:38.846,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/720. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:38.846,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",720,active,0} [ns_server:debug,2014-08-19T16:53:38.846,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006,762, 746,730,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002,758,742, 726,992,976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,722,988, 972,956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:38.921,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/720. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:38.922,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",720,active,0} [ns_server:debug,2014-08-19T16:53:39.080,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 718. Nacking mccouch update. [views:debug,2014-08-19T16:53:39.080,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/718. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:39.080,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",718,active,0} [ns_server:debug,2014-08-19T16:53:39.080,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002,758, 742,726,992,976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,722, 988,972,956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:39.139,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/718. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:39.139,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",718,active,0} [ns_server:debug,2014-08-19T16:53:39.231,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 716. Nacking mccouch update. [views:debug,2014-08-19T16:53:39.231,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/716. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:39.231,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",716,active,0} [ns_server:debug,2014-08-19T16:53:39.232,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002,758, 742,726,992,976,960,944,1008,764,748,732,716,998,982,966,950,1014,754,738, 722,988,972,956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:39.306,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/716. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:39.307,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",716,active,0} [ns_server:debug,2014-08-19T16:53:39.398,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 714. Nacking mccouch update. [views:debug,2014-08-19T16:53:39.399,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/714. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:39.399,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",714,active,0} [ns_server:debug,2014-08-19T16:53:39.399,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002, 758,742,726,992,976,960,944,1008,764,748,732,716,998,982,966,950,1014,754, 738,722,988,972,956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:39.457,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/714. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:39.457,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",714,active,0} [ns_server:debug,2014-08-19T16:53:39.549,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 712. Nacking mccouch update. [views:debug,2014-08-19T16:53:39.549,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/712. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:39.549,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",712,active,0} [ns_server:debug,2014-08-19T16:53:39.550,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002, 758,742,726,992,976,960,944,1008,764,748,732,716,998,982,966,950,1014,754, 738,722,988,972,956,940,1020,1004,760,744,728,712,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:39.600,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/712. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:39.600,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",712,active,0} [ns_server:debug,2014-08-19T16:53:39.684,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 710. Nacking mccouch update. [views:debug,2014-08-19T16:53:39.684,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/710. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:39.684,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",710,active,0} [ns_server:debug,2014-08-19T16:53:39.684,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002, 758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982,966,950,1014, 754,738,722,988,972,956,940,1020,1004,760,744,728,712,1023,994,978,962,946, 1010] [views:debug,2014-08-19T16:53:39.734,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/710. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:39.734,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",710,active,0} [ns_server:debug,2014-08-19T16:53:39.826,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 708. Nacking mccouch update. [views:debug,2014-08-19T16:53:39.826,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/708. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:39.826,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",708,active,0} [ns_server:debug,2014-08-19T16:53:39.826,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,708,990,974,958,942,1022, 1006,762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018, 1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982,966,950, 1014,754,738,722,988,972,956,940,1020,1004,760,744,728,712,1023,994,978,962, 946,1010] [views:debug,2014-08-19T16:53:39.861,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/708. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:39.861,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",708,active,0} [ns_server:debug,2014-08-19T16:53:40.006,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 706. Nacking mccouch update. [views:debug,2014-08-19T16:53:40.007,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/706. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:40.007,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",706,active,0} [ns_server:debug,2014-08-19T16:53:40.007,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,708,990,974,958,942,1022, 1006,762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018, 1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982,966,950, 1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712,1023,994,978, 962,946,1010] [views:debug,2014-08-19T16:53:40.082,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/706. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:40.083,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",706,active,0} [ns_server:debug,2014-08-19T16:53:40.216,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 704. Nacking mccouch update. [views:debug,2014-08-19T16:53:40.216,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/704. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:40.216,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",704,active,0} [ns_server:debug,2014-08-19T16:53:40.216,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,708,990,974,958,942,1022, 1006,762,746,730,714,996,980,964,948,1012,752,736,720,704,986,970,954,938, 1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982,966, 950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712,1023,994, 978,962,946,1010] [views:debug,2014-08-19T16:53:40.283,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/704. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:40.283,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",704,active,0} [ns_server:debug,2014-08-19T16:53:40.442,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 702. Nacking mccouch update. [views:debug,2014-08-19T16:53:40.442,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/702. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:40.442,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",702,active,0} [ns_server:debug,2014-08-19T16:53:40.442,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,702,984,968,952,1016,1000,756,740,724,708,990,974,958,942, 1022,1006,762,746,730,714,996,980,964,948,1012,752,736,720,704,986,970,954, 938,1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982, 966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712,1023, 994,978,962,946,1010] [views:debug,2014-08-19T16:53:40.509,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/702. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:40.509,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",702,active,0} [ns_server:debug,2014-08-19T16:53:40.659,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 700. Nacking mccouch update. [views:debug,2014-08-19T16:53:40.659,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/700. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:40.659,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",700,active,0} [ns_server:debug,2014-08-19T16:53:40.660,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,702,984,968,952,1016,1000,756,740,724,708,990,974,958,942, 1022,1006,762,746,730,714,996,980,964,948,1012,752,736,720,704,986,970,954, 938,1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,700,998, 982,966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712, 1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:40.726,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/700. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:40.726,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",700,active,0} [ns_server:debug,2014-08-19T16:53:40.836,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 698. Nacking mccouch update. [views:debug,2014-08-19T16:53:40.836,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/698. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:40.836,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",698,active,0} [ns_server:debug,2014-08-19T16:53:40.837,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,702,984,968,952,1016,1000,756,740,724,708,990,974,958,942, 1022,1006,762,746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970, 954,938,1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,700, 998,982,966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728, 712,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:40.871,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/698. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:40.871,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",698,active,0} [ns_server:debug,2014-08-19T16:53:40.937,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 696. Nacking mccouch update. [views:debug,2014-08-19T16:53:40.937,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/696. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:40.937,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",696,active,0} [ns_server:debug,2014-08-19T16:53:40.937,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,702,984,968,952,1016,1000,756,740,724,708,990,974,958,942, 1022,1006,762,746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970, 954,938,1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,700, 998,982,966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728, 712,696,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:53:40.971,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/696. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:40.971,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",696,active,0} [ns_server:debug,2014-08-19T16:53:41.038,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 694. Nacking mccouch update. [views:debug,2014-08-19T16:53:41.038,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/694. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:41.038,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",694,active,0} [ns_server:debug,2014-08-19T16:53:41.038,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,968,952,1016,1000,756,740,724,708,990,974,958,942,1022,1006,762, 746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970,954,938,1018, 1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,998,982, 966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712,696, 1023,994,978,962,946,1010,766,734,702] [views:debug,2014-08-19T16:53:41.072,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/694. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:41.072,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",694,active,0} [ns_server:debug,2014-08-19T16:53:41.155,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 692. Nacking mccouch update. [views:debug,2014-08-19T16:53:41.155,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/692. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:41.156,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",692,active,0} [ns_server:debug,2014-08-19T16:53:41.156,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,968,952,1016,1000,756,740,724,708,692,990,974,958,942,1022,1006, 762,746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,998, 982,966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712, 696,1023,994,978,962,946,1010,766,734,702] [views:debug,2014-08-19T16:53:41.189,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/692. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:41.189,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",692,active,0} [ns_server:debug,2014-08-19T16:53:41.256,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 690. Nacking mccouch update. [views:debug,2014-08-19T16:53:41.256,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/690. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:41.257,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",690,active,0} [ns_server:debug,2014-08-19T16:53:41.257,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,968,952,1016,1000,756,740,724,708,692,990,974,958,942,1022,1006, 762,746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,998, 982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744,728, 712,696,1023,994,978,962,946,1010,766,734,702] [views:debug,2014-08-19T16:53:41.321,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/690. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:41.321,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",690,active,0} [ns_server:debug,2014-08-19T16:53:41.478,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 688. Nacking mccouch update. [views:debug,2014-08-19T16:53:41.478,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/688. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:41.479,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",688,active,0} [ns_server:debug,2014-08-19T16:53:41.479,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,968,952,1016,1000,756,740,724,708,692,990,974,958,942,1022,1006, 762,746,730,714,698,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,998, 982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744,728, 712,696,1023,994,978,962,946,1010,766,734,702] [views:debug,2014-08-19T16:53:41.554,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/688. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:41.554,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",688,active,0} [ns_server:info,2014-08-19T16:53:41.598,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.91' [ns_server:debug,2014-08-19T16:53:41.704,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 686. Nacking mccouch update. [views:debug,2014-08-19T16:53:41.704,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/686. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:41.705,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",686,active,0} [ns_server:debug,2014-08-19T16:53:41.705,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,968,952,1016,1000,756,740,724,708,692,990,974,958,942,1022, 1006,762,746,730,714,698,996,980,964,948,1012,752,736,720,704,688,986,970, 954,938,1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716, 700,998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760, 744,728,712,696,1023,994,978,962,946,1010,766,734,702] [views:debug,2014-08-19T16:53:41.780,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/686. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:41.780,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",686,active,0} [ns_server:debug,2014-08-19T16:53:41.938,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 684. Nacking mccouch update. [views:debug,2014-08-19T16:53:41.939,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/684. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:41.939,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",684,active,0} [ns_server:debug,2014-08-19T16:53:41.939,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,990,974,958,942,1022,1006,762, 746,730,714,698,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,684, 998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744, 728,712,696,1023,994,978,962,946,1010,766,734,702,968,1000] [views:debug,2014-08-19T16:53:42.023,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/684. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:42.023,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",684,active,0} [ns_server:debug,2014-08-19T16:53:42.028,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"maps_1_8_metahash">>,<<"default">>] [ns_server:info,2014-08-19T16:53:42.031,ns_1@10.242.238.90:<0.3544.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `maps_1_8_metahash` since at least database `maps_1_8_metahash/100` seems to be missing. 
[ns_server:info,2014-08-19T16:53:42.033,ns_1@10.242.238.90:<0.3545.1>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning up indexes for bucket `default` [ns_server:info,2014-08-19T16:53:42.034,ns_1@10.242.238.90:<0.3545.1>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:53:42.038,ns_1@10.242.238.90:<0.3548.1>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 71307, disk size is 8297262 [ns_server:debug,2014-08-19T16:53:42.038,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:53:42.040,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:53:42.189,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 682. Nacking mccouch update. [views:debug,2014-08-19T16:53:42.189,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/682. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:42.190,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",682,active,0} [ns_server:debug,2014-08-19T16:53:42.190,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,990,974,958,942,1022,1006,762, 746,730,714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,684, 998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744, 728,712,696,1023,994,978,962,946,1010,766,734,702,968,1000] [views:debug,2014-08-19T16:53:42.273,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/682. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:42.274,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",682,active,0} [ns_server:debug,2014-08-19T16:53:42.348,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 680. Nacking mccouch update. [views:debug,2014-08-19T16:53:42.349,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/680. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:42.349,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",680,active,0} [ns_server:debug,2014-08-19T16:53:42.349,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,990,974,958,942,1022,1006,762, 746,730,714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,684, 998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744, 728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000] [views:debug,2014-08-19T16:53:42.382,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/680. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:42.382,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",680,active,0} [ns_server:debug,2014-08-19T16:53:42.449,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 678. Nacking mccouch update. [views:debug,2014-08-19T16:53:42.449,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/678. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:42.450,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",678,active,0} [ns_server:debug,2014-08-19T16:53:42.450,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,990,974,958,942,1022,1006,762, 746,730,714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760, 744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000] [views:debug,2014-08-19T16:53:42.483,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/678. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:42.484,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",678,active,0} [ns_server:debug,2014-08-19T16:53:42.550,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 676. Nacking mccouch update. [views:debug,2014-08-19T16:53:42.550,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/676. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:42.550,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",676,active,0} [ns_server:debug,2014-08-19T16:53:42.551,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,676,990,974,958,942,1022,1006, 762,746,730,714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954, 938,1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716, 700,684,998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004, 760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000] [views:debug,2014-08-19T16:53:42.584,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/676. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:42.584,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",676,active,0} [ns_server:debug,2014-08-19T16:53:42.651,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 674. Nacking mccouch update. [views:debug,2014-08-19T16:53:42.651,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/674. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:42.651,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",674,active,0} [ns_server:debug,2014-08-19T16:53:42.651,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954,938,1018, 1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700,684, 998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004,760, 744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000,740,708, 676] [views:debug,2014-08-19T16:53:42.685,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/674. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:42.685,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",674,active,0} [ns_server:debug,2014-08-19T16:53:42.832,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 672. Nacking mccouch update. [views:debug,2014-08-19T16:53:42.832,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/672. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:42.832,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",672,active,0} [ns_server:debug,2014-08-19T16:53:42.832,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004, 760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000,740, 708,676] [views:debug,2014-08-19T16:53:42.891,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/672. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:42.891,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",672,active,0} [ns_server:debug,2014-08-19T16:53:43.057,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 670. Nacking mccouch update. [views:debug,2014-08-19T16:53:43.058,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/670. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:43.058,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",670,active,0} [ns_server:debug,2014-08-19T16:53:43.058,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004, 760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,670,968,1000, 740,708,676] [views:debug,2014-08-19T16:53:43.133,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/670. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:43.133,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",670,active,0} [ns_server:debug,2014-08-19T16:53:43.300,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 668. Nacking mccouch update. [views:debug,2014-08-19T16:53:43.300,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/668. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:43.300,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",668,active,0} [ns_server:debug,2014-08-19T16:53:43.300,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,668,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020, 1004,760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,670,968, 1000,740,708,676] [views:debug,2014-08-19T16:53:43.384,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/668. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:43.384,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",668,active,0} [ns_server:debug,2014-08-19T16:53:43.559,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 666. Nacking mccouch update. [views:debug,2014-08-19T16:53:43.559,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/666. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:43.559,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",666,active,0} [ns_server:debug,2014-08-19T16:53:43.559,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,668,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020, 1004,760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,670,968, 1000,740,708,676] [views:debug,2014-08-19T16:53:43.635,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/666. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:43.635,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",666,active,0} [ns_server:debug,2014-08-19T16:53:43.760,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 664. Nacking mccouch update. [views:debug,2014-08-19T16:53:43.760,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/664. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:43.760,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",664,active,0} [ns_server:debug,2014-08-19T16:53:43.760,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,958,1022,762,746,730,714,698,682, 666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938,1018,1002, 758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700,684,668,998, 982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670,968,1000,740, 708,676,974,942,1006] [views:debug,2014-08-19T16:53:43.794,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/664. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:43.794,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",664,active,0} [ns_server:debug,2014-08-19T16:53:43.861,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 662. Nacking mccouch update. [views:debug,2014-08-19T16:53:43.861,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/662. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:43.861,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",662,active,0} [ns_server:debug,2014-08-19T16:53:43.861,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,958,1022,762,746,730,714,698,682, 666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670,968,1000, 740,708,676,974,942,1006] [views:debug,2014-08-19T16:53:43.896,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/662. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:43.896,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",662,active,0} [ns_server:debug,2014-08-19T16:53:43.963,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 660. Nacking mccouch update. [views:debug,2014-08-19T16:53:43.963,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/660. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:43.963,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",660,active,0} [ns_server:debug,2014-08-19T16:53:43.963,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,660,990,958,1022,762,746,730,714,698, 682,666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938,1018, 1002,758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700, 684,668,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670, 968,1000,740,708,676,974,942,1006] [views:debug,2014-08-19T16:53:44.022,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/660. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:44.022,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",660,active,0} [ns_server:debug,2014-08-19T16:53:44.189,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 658. Nacking mccouch update. [views:debug,2014-08-19T16:53:44.189,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/658. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:44.189,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",658,active,0} [ns_server:debug,2014-08-19T16:53:44.189,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,660,990,958,1022,762,746,730,714,698, 682,666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938,1018, 1002,758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700, 684,668,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702, 670,968,1000,740,708,676,974,942,1006] [views:debug,2014-08-19T16:53:44.272,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/658. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:44.273,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",658,active,0} [ns_server:debug,2014-08-19T16:53:44.364,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 656. Nacking mccouch update. [views:debug,2014-08-19T16:53:44.364,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/656. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:44.365,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",656,active,0} [ns_server:debug,2014-08-19T16:53:44.365,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,660,990,958,1022,762,746,730,714,698, 682,666,996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938, 1018,1002,758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716, 700,684,668,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702, 670,968,1000,740,708,676,974,942,1006] [views:debug,2014-08-19T16:53:44.402,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/656. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:44.402,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",656,active,0} [ns_server:debug,2014-08-19T16:53:44.469,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 654. Nacking mccouch update. [views:debug,2014-08-19T16:53:44.469,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/654. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:44.469,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",654,active,0} [ns_server:debug,2014-08-19T16:53:44.470,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670,968, 1000,740,708,676,974,942,1006,746,714,682] [views:debug,2014-08-19T16:53:44.503,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/654. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:44.503,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",654,active,0} [ns_server:debug,2014-08-19T16:53:44.646,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 652. Nacking mccouch update. [views:debug,2014-08-19T16:53:44.646,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/652. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:44.646,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",652,active,0} [ns_server:debug,2014-08-19T16:53:44.646,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 652,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670, 968,1000,740,708,676,974,942,1006,746,714,682] [views:debug,2014-08-19T16:53:44.705,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/652. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:44.705,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",652,active,0} [ns_server:debug,2014-08-19T16:53:44.847,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 650. Nacking mccouch update. [views:debug,2014-08-19T16:53:44.847,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/650. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:44.847,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",650,active,0} [ns_server:debug,2014-08-19T16:53:44.847,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 652,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670, 968,1000,740,708,676,974,942,1006,746,714,682,650] [views:debug,2014-08-19T16:53:44.881,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/650. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:44.881,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",650,active,0} [ns_server:debug,2014-08-19T16:53:44.948,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 648. Nacking mccouch update. [views:debug,2014-08-19T16:53:44.948,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/648. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:44.948,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",648,active,0} [ns_server:debug,2014-08-19T16:53:44.948,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 652,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702, 670,968,1000,740,708,676,974,942,1006,746,714,682,650] [views:debug,2014-08-19T16:53:44.982,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/648. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:44.982,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",648,active,0} [ns_server:debug,2014-08-19T16:53:45.140,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 646. Nacking mccouch update. [views:debug,2014-08-19T16:53:45.140,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/646. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:45.140,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",646,active,0} [ns_server:debug,2014-08-19T16:53:45.141,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684, 668,652,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734, 702,670,968,1000,740,708,676,974,942,1006,746,714,682,650] [views:debug,2014-08-19T16:53:45.200,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/646. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:45.200,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",646,active,0} [ns_server:debug,2014-08-19T16:53:45.366,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 644. Nacking mccouch update. [views:debug,2014-08-19T16:53:45.366,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/644. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:45.366,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",644,active,0} [ns_server:debug,2014-08-19T16:53:45.366,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,986,970,954,938,1018,1002,758,742,726, 710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652,998, 982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702,670,968, 1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012] [views:debug,2014-08-19T16:53:45.425,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/644. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:45.425,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",644,active,0} [ns_server:debug,2014-08-19T16:53:45.567,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 642. Nacking mccouch update. [views:debug,2014-08-19T16:53:45.567,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/642. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:45.567,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",642,active,0} [ns_server:debug,2014-08-19T16:53:45.567,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,986,970,954,938,1018,1002,758,742,726, 710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652,998, 982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702,670, 968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012] [views:debug,2014-08-19T16:53:45.643,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/642. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:45.643,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",642,active,0} [ns_server:debug,2014-08-19T16:53:45.776,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 640. Nacking mccouch update. [views:debug,2014-08-19T16:53:45.776,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/640. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:45.776,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",640,active,0} [ns_server:debug,2014-08-19T16:53:45.776,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,640,986,970,954,938,1018,1002,758,742, 726,710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652, 998,982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702, 670,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012] [views:debug,2014-08-19T16:53:45.827,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/640. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:45.827,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",640,active,0} [ns_server:debug,2014-08-19T16:53:45.932,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 638. Nacking mccouch update. [views:debug,2014-08-19T16:53:45.932,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/638. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:45.932,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",638,active,0} [ns_server:debug,2014-08-19T16:53:45.932,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,640,986,970,954,938,1018,1002,758,742, 726,710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652, 998,982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012] [views:debug,2014-08-19T16:53:45.966,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/638. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:45.966,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",638,active,0} [ns_server:debug,2014-08-19T16:53:46.049,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 636. Nacking mccouch update. [views:debug,2014-08-19T16:53:46.049,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/636. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:46.049,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",636,active,0} [ns_server:debug,2014-08-19T16:53:46.050,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,640,986,970,954,938,1018,1002,758,742, 726,710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652, 636,998,982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734, 702,670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948, 1012] [views:debug,2014-08-19T16:53:46.117,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/636. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:46.117,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",636,active,0} [ns_server:debug,2014-08-19T16:53:46.242,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 634. Nacking mccouch update. [views:debug,2014-08-19T16:53:46.242,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/634. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:46.242,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",634,active,0} [ns_server:debug,2014-08-19T16:53:46.242,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710,694, 678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998,982, 966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702,670,638, 968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012,752,720, 688,656] [views:debug,2014-08-19T16:53:46.293,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/634. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:46.293,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",634,active,0} [ns_server:debug,2014-08-19T16:53:46.417,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 632. Nacking mccouch update. [views:debug,2014-08-19T16:53:46.418,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/632. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:46.418,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",632,active,0} [ns_server:debug,2014-08-19T16:53:46.418,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710,694, 678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998,982, 966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702,670, 638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012,752, 720,688,656] [views:debug,2014-08-19T16:53:46.485,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/632. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:46.485,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",632,active,0} [ns_server:debug,2014-08-19T16:53:46.585,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 630. Nacking mccouch update. [views:debug,2014-08-19T16:53:46.585,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/630. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:46.585,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",630,active,0} [ns_server:debug,2014-08-19T16:53:46.585,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710,694, 678,662,646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998, 982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012, 752,720,688,656] [views:debug,2014-08-19T16:53:46.636,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/630. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:46.636,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",630,active,0} [ns_server:debug,2014-08-19T16:53:46.761,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 628. Nacking mccouch update. [views:debug,2014-08-19T16:53:46.761,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/628. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:46.761,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",628,active,0} [ns_server:debug,2014-08-19T16:53:46.761,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,628,990,958,1022,762,730,698, 666,634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710, 694,678,662,646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636, 998,982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734, 702,670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948, 1012,752,720,688,656] [views:debug,2014-08-19T16:53:46.811,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/628. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:46.811,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",628,active,0} [ns_server:debug,2014-08-19T16:53:46.936,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 626. Nacking mccouch update. [views:debug,2014-08-19T16:53:46.936,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/626. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:46.937,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",626,active,0} [ns_server:debug,2014-08-19T16:53:46.937,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,628,990,958,1022,762,730,698, 666,634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710, 694,678,662,646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636, 998,982,966,950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766, 734,702,670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980, 948,1012,752,720,688,656] [views:debug,2014-08-19T16:53:46.988,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/626. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:46.988,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",626,active,0} [ns_server:debug,2014-08-19T16:53:47.113,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 624. Nacking mccouch update. [views:debug,2014-08-19T16:53:47.114,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/624. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:47.114,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",624,active,0} [ns_server:debug,2014-08-19T16:53:47.114,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,628,990,958,1022,762,730,698, 666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662,646, 630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702,670,638, 968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012,752,720, 688,656,624,986,954,1018] [views:debug,2014-08-19T16:53:47.148,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/624. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:47.148,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",624,active,0} [ns_server:debug,2014-08-19T16:53:47.264,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 622. Nacking mccouch update. [views:debug,2014-08-19T16:53:47.265,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/622. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:47.265,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",622,active,0} [ns_server:debug,2014-08-19T16:53:47.265,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662, 646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998,982,966, 950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702,670, 638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012,752, 720,688,656,624,986,954,1018] [views:debug,2014-08-19T16:53:47.335,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/622. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:47.335,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",622,active,0} [ns_server:debug,2014-08-19T16:53:47.461,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 620. Nacking mccouch update. [views:debug,2014-08-19T16:53:47.461,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/620. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:47.461,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",620,active,0} [ns_server:debug,2014-08-19T16:53:47.461,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662, 646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982, 966,950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012, 752,720,688,656,624,986,954,1018] [views:debug,2014-08-19T16:53:47.512,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/620. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:47.512,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",620,active,0} [ns_server:debug,2014-08-19T16:53:47.636,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 618. Nacking mccouch update. [views:debug,2014-08-19T16:53:47.637,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/618. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:47.637,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",618,active,0} [ns_server:debug,2014-08-19T16:53:47.637,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662, 646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982, 966,950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,618,980,948, 1012,752,720,688,656,624,986,954,1018] [views:debug,2014-08-19T16:53:47.712,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/618. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:47.712,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",618,active,0} [ns_server:debug,2014-08-19T16:53:47.855,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 616. Nacking mccouch update. [views:debug,2014-08-19T16:53:47.855,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/616. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:47.855,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",616,active,0} [ns_server:debug,2014-08-19T16:53:47.856,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662, 646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982, 966,950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734, 702,670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,618,980, 948,1012,752,720,688,656,624,986,954,1018] [views:debug,2014-08-19T16:53:47.915,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/616. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:47.915,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",616,active,0} [ns_server:debug,2014-08-19T16:53:47.981,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 614. Nacking mccouch update. [views:debug,2014-08-19T16:53:47.981,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/614. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:47.981,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",614,active,0} [ns_server:debug,2014-08-19T16:53:47.982,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,742,710,678,646,614,992,976, 960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950,1014, 754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702,670,638, 968,1000,740,708,676,644,974,942,1006,746,714,682,650,618,980,948,1012,752, 720,688,656,624,986,954,1018,758,726,694,662,630] [views:debug,2014-08-19T16:53:48.015,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/614. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:48.016,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",614,active,0} [ns_server:debug,2014-08-19T16:53:48.107,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 612. Nacking mccouch update. [views:debug,2014-08-19T16:53:48.107,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/612. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:48.107,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",612,active,0} [ns_server:debug,2014-08-19T16:53:48.107,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,742,710,678,646,614,992,976, 960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950,1014, 754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702,670,638, 968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948,1012, 752,720,688,656,624,986,954,1018,758,726,694,662,630] [views:debug,2014-08-19T16:53:48.166,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/612. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:48.166,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",612,active,0} [ns_server:debug,2014-08-19T16:53:48.283,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 610. Nacking mccouch update. [views:debug,2014-08-19T16:53:48.283,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/610. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:48.283,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",610,active,0} [ns_server:debug,2014-08-19T16:53:48.283,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,742,710,678,646,614,992,976, 960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950,1014, 754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702,670, 638,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948, 1012,752,720,688,656,624,986,954,1018,758,726,694,662,630] [views:debug,2014-08-19T16:53:48.366,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/610. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:48.367,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",610,active,0} [ns_server:info,2014-08-19T16:53:48.446,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.91' [ns_server:debug,2014-08-19T16:53:48.533,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 608. Nacking mccouch update. [views:debug,2014-08-19T16:53:48.533,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/608. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:48.534,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",608,active,0} [ns_server:debug,2014-08-19T16:53:48.534,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614,992, 976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980, 948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630] [views:debug,2014-08-19T16:53:48.592,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/608. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:48.592,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",608,active,0} [ns_server:debug,2014-08-19T16:53:48.734,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 606. Nacking mccouch update. [views:debug,2014-08-19T16:53:48.734,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/606. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:48.734,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",606,active,0} [ns_server:debug,2014-08-19T16:53:48.735,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614,992, 976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702, 670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618, 980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630] [views:debug,2014-08-19T16:53:48.819,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/606. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:48.819,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",606,active,0} [ns_server:debug,2014-08-19T16:53:48.940,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 604. Nacking mccouch update. [views:debug,2014-08-19T16:53:48.940,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/604. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:48.940,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",604,active,0} [ns_server:debug,2014-08-19T16:53:48.941,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614,976, 944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950,1014, 754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980, 948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,992,960] [views:debug,2014-08-19T16:53:48.990,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/604. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:48.991,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",604,active,0} [ns_server:debug,2014-08-19T16:53:49.057,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 602. Nacking mccouch update. [views:debug,2014-08-19T16:53:49.058,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/602. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:49.058,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",602,active,0} [ns_server:debug,2014-08-19T16:53:49.058,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614, 976,944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702, 670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618, 980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,992,960] [views:debug,2014-08-19T16:53:49.092,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/602. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:49.092,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",602,active,0} [ns_server:debug,2014-08-19T16:53:49.217,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 600. Nacking mccouch update. [views:debug,2014-08-19T16:53:49.217,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/600. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:49.217,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",600,active,0} [ns_server:debug,2014-08-19T16:53:49.217,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614, 976,944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734, 702,670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650, 618,980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,992, 960] [views:debug,2014-08-19T16:53:49.267,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/600. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:49.267,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",600,active,0} [ns_server:debug,2014-08-19T16:53:49.393,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 598. Nacking mccouch update. [views:debug,2014-08-19T16:53:49.393,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/598. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:49.393,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",598,active,0} [ns_server:debug,2014-08-19T16:53:49.393,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614, 976,944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734, 702,670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650, 618,980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,598, 992,960] [views:debug,2014-08-19T16:53:49.426,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/598. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:49.427,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",598,active,0} [ns_server:debug,2014-08-19T16:53:49.551,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 596. Nacking mccouch update. [views:debug,2014-08-19T16:53:49.552,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/596. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:49.552,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",596,active,0} [ns_server:debug,2014-08-19T16:53:49.552,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,596,990,958,1022,762, 730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646, 614,976,944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734, 702,670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650, 618,980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,598, 992,960] [views:debug,2014-08-19T16:53:49.602,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/596. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:49.602,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",596,active,0} [ns_server:debug,2014-08-19T16:53:49.685,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 594. Nacking mccouch update. [views:debug,2014-08-19T16:53:49.686,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/594. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:49.686,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",594,active,0} [ns_server:debug,2014-08-19T16:53:49.686,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,596,990,958,1022,762, 730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646, 614,976,944,1008,748,716,684,652,620,998,982,966,950,1014,754,738,722,706, 690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712,696, 680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670,638,606, 968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948,1012, 752,720,688,656,624,986,954,1018,758,726,694,662,630,598,992,960,764,732,700, 668,636,604] [views:debug,2014-08-19T16:53:49.719,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/594. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:49.719,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",594,active,0} [ns_server:debug,2014-08-19T16:53:49.795,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 592. Nacking mccouch update. [views:debug,2014-08-19T16:53:49.795,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/592. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:49.795,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",592,active,0} [ns_server:debug,2014-08-19T16:53:49.795,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,596,990,958,1022,762, 730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646, 614,976,944,1008,748,716,684,652,620,998,982,966,950,1014,754,738,722,706, 690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712,696, 680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670,638,606, 968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948,1012, 752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992,960,764,732, 700,668,636,604] [views:debug,2014-08-19T16:53:49.845,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/592. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:49.845,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",592,active,0} [ns_server:debug,2014-08-19T16:53:49.987,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 590. Nacking mccouch update. [views:debug,2014-08-19T16:53:49.987,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/590. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:49.987,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",590,active,0} [ns_server:debug,2014-08-19T16:53:49.988,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,976,944,1008,748,716,684,652,620,998,982,966,950,1014,754,738,722, 706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712, 696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670,638, 606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948, 1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992,960, 764,732,700,668,636,604] [views:debug,2014-08-19T16:53:50.055,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/590. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:50.055,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",590,active,0} [ns_server:debug,2014-08-19T16:53:50.206,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 588. Nacking mccouch update. [views:debug,2014-08-19T16:53:50.206,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/588. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:50.206,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",588,active,0} [ns_server:debug,2014-08-19T16:53:50.206,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,976,944,1008,748,716,684,652,620,588,998,982,966,950,1014,754,738, 722,706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980, 948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992, 960,764,732,700,668,636,604] [views:debug,2014-08-19T16:53:50.253,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/588. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:50.253,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",588,active,0} [ns_server:debug,2014-08-19T16:53:50.378,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 586. Nacking mccouch update. [views:debug,2014-08-19T16:53:50.378,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/586. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:50.378,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",586,active,0} [ns_server:debug,2014-08-19T16:53:50.378,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,976,944,1008,748,716,684,652,620,588,998,982,966,950,1014,754,738, 722,706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 992,960,764,732,700,668,636,604] [views:debug,2014-08-19T16:53:50.429,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/586. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:50.429,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",586,active,0} [ns_server:debug,2014-08-19T16:53:50.562,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 584. Nacking mccouch update. [views:debug,2014-08-19T16:53:50.562,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/584. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:50.562,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",584,active,0} [ns_server:debug,2014-08-19T16:53:50.562,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738,722,706, 690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712,696, 680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734,702,670,638, 606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,586,980, 948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992, 960,764,732,700,668,636,604,998,966] [views:debug,2014-08-19T16:53:50.613,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/584. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:50.613,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",584,active,0} [ns_server:debug,2014-08-19T16:53:50.679,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 582. Nacking mccouch update. [views:debug,2014-08-19T16:53:50.679,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/582. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:50.679,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",582,active,0} [ns_server:debug,2014-08-19T16:53:50.680,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738,722, 706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712, 696,680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 992,960,764,732,700,668,636,604,998,966] [views:debug,2014-08-19T16:53:50.714,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/582. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:50.714,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",582,active,0} [ns_server:debug,2014-08-19T16:53:50.781,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 580. Nacking mccouch update. [views:debug,2014-08-19T16:53:50.781,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/580. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:50.781,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",580,active,0} [ns_server:debug,2014-08-19T16:53:50.782,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738,722, 706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712, 696,680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618, 586,980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630, 598,992,960,764,732,700,668,636,604,998,966] [views:debug,2014-08-19T16:53:50.840,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/580. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:50.840,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",580,active,0} [ns_server:debug,2014-08-19T16:53:50.990,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 578. Nacking mccouch update. [views:debug,2014-08-19T16:53:50.990,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/578. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:50.990,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",578,active,0} [ns_server:debug,2014-08-19T16:53:50.990,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738,722, 706,690,674,658,642,626,610,594,578,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734,702, 670,638,606,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650, 618,586,980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662, 630,598,992,960,764,732,700,668,636,604,998,966] [views:debug,2014-08-19T16:53:51.050,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/578. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:51.050,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",578,active,0} [ns_server:debug,2014-08-19T16:53:51.064,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:51.064,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:51.064,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:51.064,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:51.064,ns_1@10.242.238.90:ns_config_isasl_sync<0.17399.0>:ns_config_isasl_sync:writeSASLConf:143]Writing isasl passwd file: "/opt/couchbase/var/lib/couchbase/isasl.pw" [ns_server:debug,2014-08-19T16:53:51.065,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"maps_1_8_metahash", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"dfbe82706d975a8e74781701767f7843">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,104857600}, {auth_type,none}, {moxi_port,11221}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, [{map,[]}, {fastForwardMap,[]}, {uuid,<<"e28c79b4e936fc1ba8f8f3d60e6c45c8">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,484442112}, {auth_type,none}, {moxi_port,11222}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,[]}]]}] [ns_server:debug,2014-08-19T16:53:51.069,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:51.069,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:51.069,ns_1@10.242.238.90:ns_bucket_worker<0.17558.0>:ns_bucket_sup:update_childs:84]Starting new child: {{per_bucket_sup,"maps_1_8_tiles"}, {single_bucket_sup,start_link,["maps_1_8_tiles"]}, permanent,infinity,supervisor, [single_bucket_sup]} [error_logger:info,2014-08-19T16:53:51.069,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.4347.1>}, {name,{per_bucket_sup,"maps_1_8_tiles"}}, {mfargs, {single_bucket_sup,start_link,["maps_1_8_tiles"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] 
[ns_server:debug,2014-08-19T16:53:51.070,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:51.070,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:51.071,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"maps_1_8_metahash", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"dfbe82706d975a8e74781701767f7843">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,104857600}, {auth_type,none}, {moxi_port,11221}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"maps_1_8_tiles", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"e28c79b4e936fc1ba8f8f3d60e6c45c8">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,484442112}, {auth_type,none}, {moxi_port,11222}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}]}]}] [user:info,2014-08-19T16:53:51.125,ns_1@10.242.238.90:<0.17397.0>:ns_log:crash_consumption_loop:64]Port server moxi on node 'babysitter_of_ns_1@127.0.0.1' exited with status 0. Restarting. Messages: 2014-08-19 16:53:26: (cproxy_config.c.315) env: MOXI_SASL_PLAIN_USR (13) 2014-08-19 16:53:26: (cproxy_config.c.324) env: MOXI_SASL_PLAIN_PWD (12) EOL on stdin. 
Exiting [ns_server:debug,2014-08-19T16:53:51.132,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:init:228]Usable vbuckets: [] [ns_server:debug,2014-08-19T16:53:51.133,ns_1@10.242.238.90:ns_memcached-maps_1_8_tiles<0.4362.1>:ns_memcached:init:144]Starting ns_memcached [ns_server:debug,2014-08-19T16:53:51.133,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [error_logger:info,2014-08-19T16:53:51.132,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_tiles'} started: [{pid,<0.4349.1>}, {name,{capi_set_view_manager,"maps_1_8_tiles"}}, {mfargs, {capi_set_view_manager,start_link, ["maps_1_8_tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:53:51.133,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:51.133,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:51.133,ns_1@10.242.238.90:<0.4363.1>:ns_memcached:run_connect_phase:167]Started 'connecting' phase of ns_memcached-maps_1_8_tiles. Parent is <0.4362.1> [error_logger:info,2014-08-19T16:53:51.133,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_tiles'} started: [{pid,<0.4362.1>}, {name,{ns_memcached,"maps_1_8_tiles"}}, {mfargs,{ns_memcached,start_link,["maps_1_8_tiles"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:51.134,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_tiles'} started: [{pid,<0.4364.1>}, {name,{tap_replication_manager,"maps_1_8_tiles"}}, {mfargs, {tap_replication_manager,start_link, ["maps_1_8_tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T16:53:51.134,ns_1@10.242.238.90:janitor_agent-maps_1_8_tiles<0.4367.1>:janitor_agent:read_flush_counter:936]Loading flushseq failed: {error,enoent}. Assuming it's equal to global config. 
[error_logger:info,2014-08-19T16:53:51.134,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_tiles'} started: [{pid,<0.4365.1>}, {name,{ns_vbm_new_sup,"maps_1_8_tiles"}}, {mfargs,{ns_vbm_new_sup,start_link,["maps_1_8_tiles"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2014-08-19T16:53:51.134,ns_1@10.242.238.90:janitor_agent-maps_1_8_tiles<0.4367.1>:janitor_agent:read_flush_counter_from_config:943]Initialized flushseq 0 from bucket config [error_logger:info,2014-08-19T16:53:51.134,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_tiles'} started: [{pid,<0.4366.1>}, {name,{ns_vbm_sup,"maps_1_8_tiles"}}, {mfargs,{ns_vbm_sup,start_link,["maps_1_8_tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:53:51.134,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_tiles'} started: [{pid,<0.4367.1>}, {name,{janitor_agent,"maps_1_8_tiles"}}, {mfargs,{janitor_agent,start_link,["maps_1_8_tiles"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:51.135,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_tiles'} started: [{pid,<0.4368.1>}, {name,{couch_stats_reader,"maps_1_8_tiles"}}, {mfargs, {couch_stats_reader,start_link,["maps_1_8_tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:51.135,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_tiles'} started: [{pid,<0.4369.1>}, {name,{stats_collector,"maps_1_8_tiles"}}, {mfargs, {stats_collector,start_link,["maps_1_8_tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:51.136,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_tiles'} started: [{pid,<0.4371.1>}, {name,{stats_archiver,"maps_1_8_tiles"}}, {mfargs,{stats_archiver,start_link,["maps_1_8_tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:51.137,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_tiles'} started: [{pid,<0.4373.1>}, {name,{stats_reader,"maps_1_8_tiles"}}, {mfargs,{stats_reader,start_link,["maps_1_8_tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:51.137,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_tiles'} started: [{pid,<0.4374.1>}, 
{name,{failover_safeness_level,"maps_1_8_tiles"}}, {mfargs, {failover_safeness_level,start_link, ["maps_1_8_tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:51.137,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-maps_1_8_tiles'} started: [{pid,<0.4375.1>}, {name,{terse_bucket_info_uploader,"maps_1_8_tiles"}}, {mfargs, {terse_bucket_info_uploader,start_link, ["maps_1_8_tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:53:51.138,ns_1@10.242.238.90:<0.17535.0>:mc_tcp_listener:accept_loop:31]Got new connection [ns_server:debug,2014-08-19T16:53:51.138,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:51.139,ns_1@10.242.238.90:<0.17535.0>:mc_tcp_listener:accept_loop:33]Passed connection to mc_conn_sup: <0.4377.1> [ns_server:info,2014-08-19T16:53:51.140,ns_1@10.242.238.90:ns_memcached-maps_1_8_tiles<0.4362.1>:ns_memcached:ensure_bucket:1178]Created bucket "maps_1_8_tiles" with config string "ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;max_size=484442112;tap_keepalive=300;dbname=/var/lib/pgsql/maps_1_8_tiles;allow_data_loss_during_shutdown=true;backend=couchdb;couch_bucket=maps_1_8_tiles;couch_port=11213;max_vbuckets=1024;alog_path=/var/lib/pgsql/maps_1_8_tiles/access.log;data_traffic_enabled=false;max_num_workers=3;uuid=e28c79b4e936fc1ba8f8f3d60e6c45c8;vb0=false;waitforwarmup=false;failpartialwarmup=false;" [ns_server:info,2014-08-19T16:53:51.141,ns_1@10.242.238.90:ns_memcached-maps_1_8_tiles<0.4362.1>:ns_memcached:handle_cast:609]Main ns_memcached connection established: {ok,#Port<0.22514>} [ns_server:debug,2014-08-19T16:53:51.141,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:51.141,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:51.141,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [user:info,2014-08-19T16:53:51.141,ns_1@10.242.238.90:ns_memcached-maps_1_8_tiles<0.4362.1>:ns_memcached:handle_cast:632]Bucket "maps_1_8_tiles" loaded on node 'ns_1@10.242.238.90' in 0 seconds. [ns_server:debug,2014-08-19T16:53:51.233,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 576. Nacking mccouch update. [views:debug,2014-08-19T16:53:51.233,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/576. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:51.233,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",576,active,0} [ns_server:debug,2014-08-19T16:53:51.233,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,576,970,938,1002,742,710, 678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738, 722,706,690,674,658,642,626,610,594,578,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734, 702,670,638,606,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682, 650,618,586,980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694, 662,630,598,992,960,764,732,700,668,636,604,998,966] [views:debug,2014-08-19T16:53:51.267,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/576. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:51.267,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",576,active,0} [ns_server:debug,2014-08-19T16:53:51.336,ns_1@10.242.238.90:ns_heart_slow_status_updater<0.17440.0>:ns_heart:current_status_slow:261]Ignoring failure to get stats for bucket: "maps_1_8_tiles": {error,no_samples} [ns_server:debug,2014-08-19T16:53:51.359,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 574. Nacking mccouch update. [views:debug,2014-08-19T16:53:51.359,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/574. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:51.359,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",574,active,0} [ns_server:debug,2014-08-19T16:53:51.359,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,576,970,938,1002,742,710, 678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,722, 690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664,648, 632,616,600,584,1023,994,978,962,946,1010,766,734,702,670,638,606,574,968, 1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586,980,948, 1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992,960, 764,732,700,668,636,604,998,966,738,706,674,642,610,578] [views:debug,2014-08-19T16:53:51.417,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/574. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:51.418,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",574,active,0} [ns_server:debug,2014-08-19T16:53:51.584,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 572. Nacking mccouch update. 
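The ns_memcached:ensure_bucket entry above logs the engine configuration for "maps_1_8_tiles" as one semicolon-separated key=value string. The short sketch below shows one way such a string can be split into a dict for inspection; the helper name parse_engine_config and the truncated sample string are illustrative assumptions, not part of ns_server or memcached.

    # Split a memcached engine config string of the form "k1=v1;k2=v2;..."
    # into a dict. The trailing semicolon in the logged string leaves an
    # empty fragment, which is skipped.
    def parse_engine_config(config_string):
        settings = {}
        for fragment in config_string.split(";"):
            if not fragment:
                continue
            key, _, value = fragment.partition("=")
            settings[key] = value
        return settings

    # Shortened copy of the string logged for "maps_1_8_tiles" above.
    sample = ("ht_size=3079;ht_locks=5;max_size=484442112;"
              "couch_bucket=maps_1_8_tiles;max_vbuckets=1024;")
    print(parse_engine_config(sample)["max_vbuckets"])  # prints 1024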
[views:debug,2014-08-19T16:53:51.585,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/572. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:51.585,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",572,active,0} [ns_server:debug,2014-08-19T16:53:51.585,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,576,970,938,1002,742,710, 678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,722, 690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664,648, 632,616,600,584,1023,994,978,962,946,1010,766,734,702,670,638,606,574,968, 1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586,980,948, 1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992,960, 764,732,700,668,636,604,572,998,966,738,706,674,642,610,578] [views:debug,2014-08-19T16:53:51.658,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/572. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:51.658,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",572,active,0} [ns_server:debug,2014-08-19T16:53:51.732,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 570. Nacking mccouch update. [views:debug,2014-08-19T16:53:51.732,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/570. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:51.732,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",570,active,0} [ns_server:debug,2014-08-19T16:53:51.733,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938,1002,742, 710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754, 722,690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664, 648,632,616,600,584,1023,994,978,962,946,1010,766,734,702,670,638,606,574, 968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586,980, 948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992, 960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578] [views:debug,2014-08-19T16:53:51.767,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/570. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:51.767,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",570,active,0} [ns_server:debug,2014-08-19T16:53:51.833,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 568. Nacking mccouch update. 
[views:debug,2014-08-19T16:53:51.833,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/568. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:51.833,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",568,active,0} [ns_server:debug,2014-08-19T16:53:51.834,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938,1002,742, 710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754, 722,690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578] [views:debug,2014-08-19T16:53:51.867,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/568. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:51.868,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",568,active,0} [ns_server:debug,2014-08-19T16:53:51.951,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 566. Nacking mccouch update. [views:debug,2014-08-19T16:53:51.951,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/566. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:51.951,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",566,active,0} [ns_server:debug,2014-08-19T16:53:51.951,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938,1002,742, 710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754, 722,690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578] [views:debug,2014-08-19T16:53:52.001,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/566. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:52.002,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",566,active,0} [ns_server:debug,2014-08-19T16:53:52.085,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 564. Nacking mccouch update. 
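Each "Usable vbuckets:" line above is a complete snapshot of the vbuckets that capi_set_view_manager currently considers usable for its bucket, so consecutive snapshots can be diffed to see which vbucket each set_vbucket event added. Below is a rough sketch of that diff, assuming the snapshots for a single bucket have been collected into one piece of log text; SNAPSHOT_RE and added_vbuckets are log-reading conveniences assumed here, not ns_server interfaces.

    import re

    # Matches the bracketed id list printed after "Usable vbuckets:" in the
    # ns_server debug log (the list may span several lines).
    SNAPSHOT_RE = re.compile(r"Usable vbuckets:\s*\[([0-9,\s]*)\]")

    def added_vbuckets(log_text):
        # For each snapshot, yield the ids that were absent from the previous
        # snapshot (the first snapshot yields its entire contents).
        previous = set()
        for match in SNAPSHOT_RE.finditer(log_text):
            current = set(map(int, re.findall(r"\d+", match.group(1))))
            yield sorted(current - previous)
            previous = current

Run over the stretch above, every snapshot after the first should yield a single new id (586, 584, 582, and so on), matching the vb numbers in the adjacent "Added _local/vbuuid document" entries.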
[views:debug,2014-08-19T16:53:52.085,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/564. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:52.085,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",564,active,0} [ns_server:debug,2014-08-19T16:53:52.085,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950, 1014,754,722,690,658,626,594,988,956,1020,760,744,728,712,696,680,664,648, 632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606,574, 968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586,980, 948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,566, 992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578,972,940, 1004] [ns_server:debug,2014-08-19T16:53:52.140,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:52.140,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:52.141,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:52.141,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:52.141,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:53:52.141,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:53:52.145,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1023 state to replica [ns_server:info,2014-08-19T16:53:52.146,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1022 state to replica [ns_server:debug,2014-08-19T16:53:52.146,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"maps_1_8_metahash", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"dfbe82706d975a8e74781701767f7843">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,104857600}, {auth_type,none}, {moxi_port,11221}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"maps_1_8_tiles", 
[{map,[{0,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {1,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {2,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {3,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {4,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {5,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {6,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {7,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {8,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {9,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {10,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {11,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {12,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {13,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {14,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {15,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {16,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {17,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {18,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {19,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {20,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {21,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {22,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {23,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {24,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {25,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {26,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {27,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {28,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {29,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {30,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {31,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {32,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {33,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {34,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {35,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {36,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {37,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {38,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {39,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {40,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {41,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {42,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {43,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {44,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {45,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {46,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {47,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {48,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {49,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {50,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {51,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {52,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {53,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {54,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {55,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {56,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {57,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {58,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {59,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {60,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {61,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {62,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {63,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {64,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {65,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {66,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, 
{67,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {68,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {69,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {70,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {71,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {72,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {73,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {74,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {75,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {76,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {77,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {78,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {79,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {80,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {81,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {82,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {83,[],['ns_1@10.242.238.88'|...]}, {84,[],[...]}, {85,[],...}, {86,...}, {...}|...]}, {fastForwardMap,[]}, {uuid,<<"e28c79b4e936fc1ba8f8f3d60e6c45c8">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,484442112}, {auth_type,none}, {moxi_port,11222}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:53:52.146,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1021 state to replica [ns_server:info,2014-08-19T16:53:52.147,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1020 state to replica [ns_server:info,2014-08-19T16:53:52.147,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1019 state to replica [ns_server:info,2014-08-19T16:53:52.148,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1018 state to replica [ns_server:info,2014-08-19T16:53:52.148,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1017 state to replica [ns_server:info,2014-08-19T16:53:52.148,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1016 state to replica [ns_server:info,2014-08-19T16:53:52.149,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1015 state to replica [ns_server:info,2014-08-19T16:53:52.149,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1014 state to replica [ns_server:info,2014-08-19T16:53:52.149,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1013 state to replica [ns_server:info,2014-08-19T16:53:52.149,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1012 state to replica [ns_server:info,2014-08-19T16:53:52.150,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1011 state to replica [ns_server:info,2014-08-19T16:53:52.150,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1010 state to replica [ns_server:info,2014-08-19T16:53:52.150,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1009 state to replica [ns_server:info,2014-08-19T16:53:52.151,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1008 state to replica [ns_server:info,2014-08-19T16:53:52.151,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1007 state to replica 
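The ns_config_log entries above dump the whole buckets key as an Erlang term, one {"BucketName", Proplist} pair per bucket. A quick, admittedly naive way to pull a single property such as ram_quota or moxi_port out of such a dump is plain text scraping; bucket_property below is exactly that kind of assumed helper, not an ns_server or ns_config API, and it presumes the property actually occurs inside the named bucket's entry.

    import re

    def bucket_property(log_text, bucket, prop):
        # Locate the logged config entry for `bucket` and scrape the first
        # {prop,Value} pair that follows it. Raises ValueError if the bucket
        # name never appears in the text.
        start = log_text.index('{"%s",' % bucket)
        match = re.search(r"\{%s,([^}]*)\}" % re.escape(prop), log_text[start:])
        return match.group(1) if match else None

    # e.g. bucket_property(log, "maps_1_8_tiles", "ram_quota") -> '484442112'
    #      bucket_property(log, "maps_1_8_tiles", "moxi_port") -> '11222'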
[ns_server:info,2014-08-19T16:53:52.151,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1006 state to replica [ns_server:info,2014-08-19T16:53:52.151,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1005 state to replica [ns_server:info,2014-08-19T16:53:52.152,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1004 state to replica [ns_server:info,2014-08-19T16:53:52.152,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1003 state to replica [views:debug,2014-08-19T16:53:52.152,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/564. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:52.152,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",564,active,0} [ns_server:info,2014-08-19T16:53:52.152,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1002 state to replica [ns_server:info,2014-08-19T16:53:52.153,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1001 state to replica [ns_server:info,2014-08-19T16:53:52.153,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 1000 state to replica [ns_server:info,2014-08-19T16:53:52.153,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 999 state to replica [ns_server:info,2014-08-19T16:53:52.154,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 998 state to replica [ns_server:info,2014-08-19T16:53:52.154,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 997 state to replica [ns_server:info,2014-08-19T16:53:52.154,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 996 state to replica [ns_server:info,2014-08-19T16:53:52.155,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 995 state to replica [ns_server:info,2014-08-19T16:53:52.155,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 994 state to replica [ns_server:info,2014-08-19T16:53:52.155,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 993 state to replica [ns_server:info,2014-08-19T16:53:52.156,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 992 state to replica [ns_server:info,2014-08-19T16:53:52.156,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 991 state to replica [ns_server:info,2014-08-19T16:53:52.156,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 990 state to replica [ns_server:info,2014-08-19T16:53:52.157,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 989 state to replica [ns_server:info,2014-08-19T16:53:52.157,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 988 state to replica [ns_server:info,2014-08-19T16:53:52.157,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 987 state to replica [ns_server:info,2014-08-19T16:53:52.157,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 986 state to replica [ns_server:info,2014-08-19T16:53:52.158,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 985 state to replica 
[ns_server:info,2014-08-19T16:53:52.158,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 984 state to replica [ns_server:info,2014-08-19T16:53:52.158,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 983 state to replica [ns_server:info,2014-08-19T16:53:52.158,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 982 state to replica [ns_server:info,2014-08-19T16:53:52.159,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 981 state to replica [ns_server:info,2014-08-19T16:53:52.159,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 980 state to replica [ns_server:info,2014-08-19T16:53:52.159,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 979 state to replica [ns_server:info,2014-08-19T16:53:52.159,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 978 state to replica [ns_server:info,2014-08-19T16:53:52.160,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 977 state to replica [ns_server:info,2014-08-19T16:53:52.160,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 976 state to replica [ns_server:info,2014-08-19T16:53:52.160,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 975 state to replica [ns_server:info,2014-08-19T16:53:52.160,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 974 state to replica [ns_server:info,2014-08-19T16:53:52.161,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 973 state to replica [ns_server:info,2014-08-19T16:53:52.161,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 972 state to replica [ns_server:info,2014-08-19T16:53:52.161,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 971 state to replica [ns_server:info,2014-08-19T16:53:52.162,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 970 state to replica [ns_server:info,2014-08-19T16:53:52.162,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 969 state to replica [ns_server:info,2014-08-19T16:53:52.162,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 968 state to replica [ns_server:info,2014-08-19T16:53:52.162,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 967 state to replica [ns_server:info,2014-08-19T16:53:52.163,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 966 state to replica [ns_server:info,2014-08-19T16:53:52.163,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 965 state to replica [ns_server:info,2014-08-19T16:53:52.163,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 964 state to replica [ns_server:info,2014-08-19T16:53:52.164,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 963 state to replica [ns_server:info,2014-08-19T16:53:52.164,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 962 state to replica [ns_server:info,2014-08-19T16:53:52.164,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 961 state to replica [ns_server:info,2014-08-19T16:53:52.165,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 960 state to replica 
[ns_server:info,2014-08-19T16:53:52.165,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 959 state to replica [ns_server:info,2014-08-19T16:53:52.165,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 958 state to replica [ns_server:info,2014-08-19T16:53:52.166,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 957 state to replica [ns_server:info,2014-08-19T16:53:52.166,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 956 state to replica [ns_server:info,2014-08-19T16:53:52.166,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 955 state to replica [ns_server:info,2014-08-19T16:53:52.166,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 954 state to replica [ns_server:info,2014-08-19T16:53:52.167,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 953 state to replica [ns_server:info,2014-08-19T16:53:52.167,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 952 state to replica [ns_server:info,2014-08-19T16:53:52.167,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 951 state to replica [ns_server:info,2014-08-19T16:53:52.168,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 950 state to replica [ns_server:info,2014-08-19T16:53:52.168,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 949 state to replica [ns_server:info,2014-08-19T16:53:52.168,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 948 state to replica [ns_server:info,2014-08-19T16:53:52.169,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 947 state to replica [ns_server:info,2014-08-19T16:53:52.169,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 946 state to replica [ns_server:info,2014-08-19T16:53:52.169,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 945 state to replica [ns_server:info,2014-08-19T16:53:52.169,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 944 state to replica [ns_server:info,2014-08-19T16:53:52.170,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 943 state to replica [ns_server:info,2014-08-19T16:53:52.170,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 942 state to replica [ns_server:info,2014-08-19T16:53:52.170,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 941 state to replica [ns_server:info,2014-08-19T16:53:52.171,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 940 state to replica [ns_server:info,2014-08-19T16:53:52.171,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 939 state to replica [ns_server:info,2014-08-19T16:53:52.171,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 938 state to replica [ns_server:info,2014-08-19T16:53:52.171,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 767 state to active [ns_server:info,2014-08-19T16:53:52.172,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 766 state to active [ns_server:info,2014-08-19T16:53:52.172,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 765 state to active 
[ns_server:info,2014-08-19T16:53:52.172,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 764 state to active [ns_server:info,2014-08-19T16:53:52.173,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 763 state to active [ns_server:info,2014-08-19T16:53:52.173,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 762 state to active [ns_server:info,2014-08-19T16:53:52.173,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 761 state to active [ns_server:info,2014-08-19T16:53:52.173,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 760 state to active [ns_server:info,2014-08-19T16:53:52.174,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 759 state to active [ns_server:info,2014-08-19T16:53:52.174,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 758 state to active [ns_server:info,2014-08-19T16:53:52.174,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 757 state to active [ns_server:info,2014-08-19T16:53:52.175,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 756 state to active [ns_server:info,2014-08-19T16:53:52.175,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 755 state to active [ns_server:info,2014-08-19T16:53:52.175,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 754 state to active [ns_server:info,2014-08-19T16:53:52.175,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 753 state to active [ns_server:info,2014-08-19T16:53:52.176,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 752 state to active [ns_server:info,2014-08-19T16:53:52.176,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 751 state to active [ns_server:info,2014-08-19T16:53:52.176,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 750 state to active [ns_server:info,2014-08-19T16:53:52.177,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 749 state to active [ns_server:info,2014-08-19T16:53:52.177,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 748 state to active [ns_server:info,2014-08-19T16:53:52.177,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 747 state to active [ns_server:info,2014-08-19T16:53:52.177,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 746 state to active [ns_server:info,2014-08-19T16:53:52.178,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 745 state to active [ns_server:info,2014-08-19T16:53:52.178,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 744 state to active [ns_server:info,2014-08-19T16:53:52.178,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 743 state to active [ns_server:info,2014-08-19T16:53:52.178,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 742 state to active [ns_server:info,2014-08-19T16:53:52.179,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 741 state to active [ns_server:info,2014-08-19T16:53:52.179,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 740 state to active 
[ns_server:info,2014-08-19T16:53:52.179,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 739 state to active [ns_server:info,2014-08-19T16:53:52.179,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 738 state to active [ns_server:info,2014-08-19T16:53:52.180,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 737 state to active [ns_server:info,2014-08-19T16:53:52.180,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 736 state to active [ns_server:info,2014-08-19T16:53:52.180,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 735 state to active [ns_server:info,2014-08-19T16:53:52.181,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 734 state to active [ns_server:info,2014-08-19T16:53:52.181,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 733 state to active [ns_server:info,2014-08-19T16:53:52.181,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 732 state to active [ns_server:info,2014-08-19T16:53:52.182,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 731 state to active [ns_server:info,2014-08-19T16:53:52.182,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 730 state to active [ns_server:info,2014-08-19T16:53:52.182,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 729 state to active [ns_server:info,2014-08-19T16:53:52.182,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 728 state to active [ns_server:info,2014-08-19T16:53:52.183,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 727 state to active [ns_server:info,2014-08-19T16:53:52.183,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 726 state to active [ns_server:info,2014-08-19T16:53:52.183,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 725 state to active [ns_server:info,2014-08-19T16:53:52.184,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 724 state to active [ns_server:info,2014-08-19T16:53:52.184,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 723 state to active [ns_server:info,2014-08-19T16:53:52.184,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 722 state to active [ns_server:info,2014-08-19T16:53:52.184,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 721 state to active [ns_server:info,2014-08-19T16:53:52.185,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 720 state to active [ns_server:info,2014-08-19T16:53:52.185,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 719 state to active [ns_server:info,2014-08-19T16:53:52.185,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 718 state to active [ns_server:info,2014-08-19T16:53:52.185,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 717 state to active [ns_server:info,2014-08-19T16:53:52.186,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 716 state to active [ns_server:info,2014-08-19T16:53:52.186,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 715 state to active 
[ns_server:info,2014-08-19T16:53:52.186,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 714 state to active [ns_server:info,2014-08-19T16:53:52.187,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 713 state to active [ns_server:info,2014-08-19T16:53:52.187,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 712 state to active [ns_server:info,2014-08-19T16:53:52.187,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 711 state to active [ns_server:info,2014-08-19T16:53:52.187,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 710 state to active [ns_server:info,2014-08-19T16:53:52.188,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 709 state to active [ns_server:info,2014-08-19T16:53:52.188,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 708 state to active [ns_server:info,2014-08-19T16:53:52.188,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 707 state to active [ns_server:info,2014-08-19T16:53:52.188,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 706 state to active [ns_server:info,2014-08-19T16:53:52.189,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 705 state to active [ns_server:info,2014-08-19T16:53:52.189,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 704 state to active [ns_server:info,2014-08-19T16:53:52.189,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 703 state to active [ns_server:info,2014-08-19T16:53:52.190,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 702 state to active [ns_server:info,2014-08-19T16:53:52.190,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 701 state to active [ns_server:info,2014-08-19T16:53:52.190,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 700 state to active [ns_server:info,2014-08-19T16:53:52.190,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 699 state to active [ns_server:info,2014-08-19T16:53:52.191,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 698 state to active [ns_server:info,2014-08-19T16:53:52.191,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 697 state to active [ns_server:info,2014-08-19T16:53:52.191,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 696 state to active [ns_server:info,2014-08-19T16:53:52.191,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 695 state to active [ns_server:info,2014-08-19T16:53:52.192,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 694 state to active [ns_server:info,2014-08-19T16:53:52.192,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 693 state to active [ns_server:info,2014-08-19T16:53:52.192,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 692 state to active [ns_server:info,2014-08-19T16:53:52.193,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 691 state to active [ns_server:info,2014-08-19T16:53:52.193,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 690 state to active 
[ns_server:info,2014-08-19T16:53:52.196,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 689 state to active [ns_server:info,2014-08-19T16:53:52.196,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 688 state to active [ns_server:info,2014-08-19T16:53:52.197,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 687 state to active [ns_server:info,2014-08-19T16:53:52.198,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 686 state to active [ns_server:info,2014-08-19T16:53:52.198,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 685 state to active [ns_server:info,2014-08-19T16:53:52.198,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 684 state to active [ns_server:info,2014-08-19T16:53:52.199,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 683 state to active [ns_server:info,2014-08-19T16:53:52.199,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 682 state to active [ns_server:info,2014-08-19T16:53:52.199,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 681 state to active [ns_server:info,2014-08-19T16:53:52.199,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 680 state to active [ns_server:info,2014-08-19T16:53:52.200,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 679 state to active [ns_server:info,2014-08-19T16:53:52.200,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 678 state to active [ns_server:info,2014-08-19T16:53:52.200,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 677 state to active [ns_server:info,2014-08-19T16:53:52.201,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 676 state to active [ns_server:info,2014-08-19T16:53:52.201,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 675 state to active [ns_server:info,2014-08-19T16:53:52.201,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 674 state to active [ns_server:info,2014-08-19T16:53:52.201,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 673 state to active [ns_server:info,2014-08-19T16:53:52.202,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 672 state to active [ns_server:info,2014-08-19T16:53:52.202,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 671 state to active [ns_server:info,2014-08-19T16:53:52.202,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 670 state to active [ns_server:info,2014-08-19T16:53:52.202,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 669 state to active [ns_server:info,2014-08-19T16:53:52.203,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 668 state to active [ns_server:info,2014-08-19T16:53:52.203,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 667 state to active [ns_server:info,2014-08-19T16:53:52.203,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 666 state to active [ns_server:info,2014-08-19T16:53:52.204,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 665 state to active 
[ns_server:info,2014-08-19T16:53:52.204,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 664 state to active [ns_server:info,2014-08-19T16:53:52.204,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 663 state to active [ns_server:info,2014-08-19T16:53:52.205,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 662 state to active [ns_server:info,2014-08-19T16:53:52.205,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 661 state to active [ns_server:info,2014-08-19T16:53:52.205,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 660 state to active [ns_server:info,2014-08-19T16:53:52.205,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 659 state to active [ns_server:info,2014-08-19T16:53:52.206,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 658 state to active [ns_server:info,2014-08-19T16:53:52.206,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 657 state to active [ns_server:info,2014-08-19T16:53:52.206,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 656 state to active [ns_server:info,2014-08-19T16:53:52.206,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 655 state to active [ns_server:info,2014-08-19T16:53:52.207,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 654 state to active [ns_server:info,2014-08-19T16:53:52.207,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 653 state to active [ns_server:info,2014-08-19T16:53:52.207,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 652 state to active [ns_server:info,2014-08-19T16:53:52.208,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 651 state to active [ns_server:info,2014-08-19T16:53:52.208,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 650 state to active [ns_server:info,2014-08-19T16:53:52.208,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 649 state to active [ns_server:info,2014-08-19T16:53:52.209,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 648 state to active [ns_server:info,2014-08-19T16:53:52.209,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 647 state to active [ns_server:info,2014-08-19T16:53:52.209,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 646 state to active [ns_server:info,2014-08-19T16:53:52.209,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 645 state to active [ns_server:info,2014-08-19T16:53:52.210,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 644 state to active [ns_server:info,2014-08-19T16:53:52.210,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 643 state to active [ns_server:info,2014-08-19T16:53:52.210,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 642 state to active [ns_server:info,2014-08-19T16:53:52.211,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 641 state to active [ns_server:info,2014-08-19T16:53:52.211,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 640 state to active 
[ns_server:info,2014-08-19T16:53:52.211,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 639 state to active [ns_server:info,2014-08-19T16:53:52.212,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 638 state to active [ns_server:info,2014-08-19T16:53:52.212,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 637 state to active [ns_server:info,2014-08-19T16:53:52.212,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 636 state to active [ns_server:info,2014-08-19T16:53:52.212,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 635 state to active [ns_server:info,2014-08-19T16:53:52.213,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 634 state to active [ns_server:info,2014-08-19T16:53:52.213,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 633 state to active [ns_server:info,2014-08-19T16:53:52.213,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 632 state to active [ns_server:info,2014-08-19T16:53:52.213,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 631 state to active [ns_server:info,2014-08-19T16:53:52.214,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 630 state to active [ns_server:info,2014-08-19T16:53:52.214,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 629 state to active [ns_server:info,2014-08-19T16:53:52.214,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 628 state to active [ns_server:info,2014-08-19T16:53:52.214,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 627 state to active [ns_server:info,2014-08-19T16:53:52.215,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 626 state to active [ns_server:info,2014-08-19T16:53:52.215,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 625 state to active [ns_server:info,2014-08-19T16:53:52.215,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 624 state to active [ns_server:info,2014-08-19T16:53:52.215,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 623 state to active [ns_server:info,2014-08-19T16:53:52.215,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 622 state to active [ns_server:info,2014-08-19T16:53:52.216,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 621 state to active [ns_server:info,2014-08-19T16:53:52.216,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 620 state to active [ns_server:info,2014-08-19T16:53:52.216,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 619 state to active [ns_server:info,2014-08-19T16:53:52.216,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 618 state to active [ns_server:info,2014-08-19T16:53:52.217,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 617 state to active [ns_server:info,2014-08-19T16:53:52.217,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 616 state to active [ns_server:info,2014-08-19T16:53:52.217,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 615 state to active 
[ns_server:info,2014-08-19T16:53:52.217,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 614 state to active [ns_server:info,2014-08-19T16:53:52.218,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 613 state to active [ns_server:info,2014-08-19T16:53:52.218,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 612 state to active [ns_server:info,2014-08-19T16:53:52.218,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 611 state to active [ns_server:info,2014-08-19T16:53:52.219,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 610 state to active [ns_server:info,2014-08-19T16:53:52.219,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 609 state to active [ns_server:info,2014-08-19T16:53:52.219,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 608 state to active [ns_server:info,2014-08-19T16:53:52.219,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 607 state to active [ns_server:info,2014-08-19T16:53:52.220,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 606 state to active [ns_server:info,2014-08-19T16:53:52.220,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 605 state to active [ns_server:info,2014-08-19T16:53:52.220,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 604 state to active [ns_server:info,2014-08-19T16:53:52.220,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 603 state to active [ns_server:info,2014-08-19T16:53:52.221,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 602 state to active [ns_server:info,2014-08-19T16:53:52.221,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 601 state to active [ns_server:info,2014-08-19T16:53:52.221,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 600 state to active [ns_server:info,2014-08-19T16:53:52.221,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 599 state to active [ns_server:info,2014-08-19T16:53:52.221,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 598 state to active [ns_server:info,2014-08-19T16:53:52.222,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 597 state to active [ns_server:info,2014-08-19T16:53:52.222,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 596 state to active [ns_server:info,2014-08-19T16:53:52.222,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 595 state to active [ns_server:info,2014-08-19T16:53:52.222,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 594 state to active [ns_server:info,2014-08-19T16:53:52.223,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 593 state to active [ns_server:info,2014-08-19T16:53:52.223,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 592 state to active [ns_server:info,2014-08-19T16:53:52.223,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 591 state to active [ns_server:info,2014-08-19T16:53:52.223,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 590 state to active 
[ns_server:info,2014-08-19T16:53:52.224,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 589 state to active [ns_server:info,2014-08-19T16:53:52.224,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 588 state to active [ns_server:info,2014-08-19T16:53:52.224,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 587 state to active [ns_server:info,2014-08-19T16:53:52.224,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 586 state to active [ns_server:info,2014-08-19T16:53:52.225,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 585 state to active [ns_server:info,2014-08-19T16:53:52.225,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 584 state to active [ns_server:info,2014-08-19T16:53:52.225,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 583 state to active [ns_server:info,2014-08-19T16:53:52.225,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 582 state to active [ns_server:info,2014-08-19T16:53:52.226,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 581 state to active [ns_server:info,2014-08-19T16:53:52.226,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 580 state to active [ns_server:info,2014-08-19T16:53:52.226,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 579 state to active [ns_server:info,2014-08-19T16:53:52.226,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 578 state to active [ns_server:info,2014-08-19T16:53:52.227,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 577 state to active [ns_server:info,2014-08-19T16:53:52.227,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 576 state to active [ns_server:info,2014-08-19T16:53:52.227,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 575 state to active [ns_server:info,2014-08-19T16:53:52.228,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 574 state to active [ns_server:info,2014-08-19T16:53:52.228,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 573 state to active [ns_server:info,2014-08-19T16:53:52.228,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 572 state to active [ns_server:info,2014-08-19T16:53:52.229,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 571 state to active [ns_server:info,2014-08-19T16:53:52.229,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 570 state to active [ns_server:info,2014-08-19T16:53:52.229,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 569 state to active [ns_server:info,2014-08-19T16:53:52.230,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 568 state to active [ns_server:info,2014-08-19T16:53:52.230,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 567 state to active [ns_server:info,2014-08-19T16:53:52.230,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 566 state to active [ns_server:info,2014-08-19T16:53:52.230,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 565 state to active 
[ns_server:info,2014-08-19T16:53:52.231,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 564 state to active [ns_server:info,2014-08-19T16:53:52.231,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 563 state to active [ns_server:info,2014-08-19T16:53:52.231,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 562 state to active [ns_server:info,2014-08-19T16:53:52.232,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 561 state to active [ns_server:info,2014-08-19T16:53:52.232,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 560 state to active [ns_server:info,2014-08-19T16:53:52.232,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 559 state to active [ns_server:info,2014-08-19T16:53:52.233,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 558 state to active [ns_server:info,2014-08-19T16:53:52.233,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 557 state to active [ns_server:info,2014-08-19T16:53:52.233,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 556 state to active [ns_server:info,2014-08-19T16:53:52.233,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 555 state to active [ns_server:info,2014-08-19T16:53:52.234,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 554 state to active [ns_server:info,2014-08-19T16:53:52.234,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 553 state to active [ns_server:info,2014-08-19T16:53:52.234,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 552 state to active [ns_server:info,2014-08-19T16:53:52.235,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 551 state to active [ns_server:info,2014-08-19T16:53:52.235,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 550 state to active [ns_server:info,2014-08-19T16:53:52.235,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 549 state to active [ns_server:info,2014-08-19T16:53:52.235,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 548 state to active [ns_server:info,2014-08-19T16:53:52.236,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 547 state to active [ns_server:info,2014-08-19T16:53:52.236,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 546 state to active [ns_server:info,2014-08-19T16:53:52.236,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 545 state to active [ns_server:info,2014-08-19T16:53:52.237,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 544 state to active [ns_server:info,2014-08-19T16:53:52.237,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 543 state to active [ns_server:info,2014-08-19T16:53:52.237,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 542 state to active [ns_server:info,2014-08-19T16:53:52.238,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 541 state to active [ns_server:info,2014-08-19T16:53:52.238,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 540 state to active 
[ns_server:info,2014-08-19T16:53:52.238,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 539 state to active [ns_server:info,2014-08-19T16:53:52.238,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 538 state to active [ns_server:info,2014-08-19T16:53:52.239,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 537 state to active [ns_server:info,2014-08-19T16:53:52.239,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 536 state to active [ns_server:info,2014-08-19T16:53:52.239,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 535 state to active [ns_server:info,2014-08-19T16:53:52.239,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 534 state to active [ns_server:info,2014-08-19T16:53:52.240,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 533 state to active [ns_server:info,2014-08-19T16:53:52.240,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 532 state to active [ns_server:info,2014-08-19T16:53:52.240,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 531 state to active [ns_server:info,2014-08-19T16:53:52.241,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 530 state to active [ns_server:info,2014-08-19T16:53:52.241,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 529 state to active [ns_server:info,2014-08-19T16:53:52.241,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 528 state to active [ns_server:info,2014-08-19T16:53:52.241,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 527 state to active [ns_server:info,2014-08-19T16:53:52.242,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 526 state to active [ns_server:info,2014-08-19T16:53:52.242,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 525 state to active [ns_server:info,2014-08-19T16:53:52.242,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 524 state to active [ns_server:info,2014-08-19T16:53:52.243,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 523 state to active [ns_server:info,2014-08-19T16:53:52.243,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 522 state to active [ns_server:info,2014-08-19T16:53:52.243,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 521 state to active [ns_server:info,2014-08-19T16:53:52.244,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 520 state to active [ns_server:info,2014-08-19T16:53:52.244,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 519 state to active [ns_server:info,2014-08-19T16:53:52.244,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 518 state to active [ns_server:info,2014-08-19T16:53:52.245,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 517 state to active [ns_server:info,2014-08-19T16:53:52.245,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 516 state to active [ns_server:info,2014-08-19T16:53:52.245,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 515 state to active 
[ns_server:info,2014-08-19T16:53:52.246,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 514 state to active [ns_server:info,2014-08-19T16:53:52.246,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 513 state to active [ns_server:info,2014-08-19T16:53:52.246,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 512 state to active [ns_server:info,2014-08-19T16:53:52.247,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 426 state to replica [ns_server:info,2014-08-19T16:53:52.247,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 425 state to replica [ns_server:info,2014-08-19T16:53:52.247,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 424 state to replica [ns_server:info,2014-08-19T16:53:52.248,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 423 state to replica [ns_server:info,2014-08-19T16:53:52.248,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 422 state to replica [ns_server:info,2014-08-19T16:53:52.248,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 421 state to replica [ns_server:info,2014-08-19T16:53:52.248,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 420 state to replica [ns_server:info,2014-08-19T16:53:52.249,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 419 state to replica [ns_server:info,2014-08-19T16:53:52.249,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 418 state to replica [ns_server:info,2014-08-19T16:53:52.249,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 417 state to replica [ns_server:info,2014-08-19T16:53:52.250,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 416 state to replica [ns_server:info,2014-08-19T16:53:52.250,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 415 state to replica [ns_server:info,2014-08-19T16:53:52.250,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 414 state to replica [ns_server:info,2014-08-19T16:53:52.251,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 413 state to replica [ns_server:info,2014-08-19T16:53:52.251,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 412 state to replica [ns_server:info,2014-08-19T16:53:52.251,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 411 state to replica [ns_server:info,2014-08-19T16:53:52.251,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 410 state to replica [ns_server:info,2014-08-19T16:53:52.252,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 409 state to replica [ns_server:info,2014-08-19T16:53:52.252,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 408 state to replica [ns_server:info,2014-08-19T16:53:52.252,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 407 state to replica [ns_server:info,2014-08-19T16:53:52.253,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 406 state to replica [ns_server:info,2014-08-19T16:53:52.253,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 405 state to replica 
[ns_server:info,2014-08-19T16:53:52.253,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 404 state to replica [ns_server:info,2014-08-19T16:53:52.254,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 403 state to replica [ns_server:info,2014-08-19T16:53:52.254,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 402 state to replica [ns_server:info,2014-08-19T16:53:52.254,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 401 state to replica [ns_server:info,2014-08-19T16:53:52.254,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 400 state to replica [ns_server:info,2014-08-19T16:53:52.255,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 399 state to replica [ns_server:info,2014-08-19T16:53:52.255,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 398 state to replica [ns_server:info,2014-08-19T16:53:52.255,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 397 state to replica [ns_server:info,2014-08-19T16:53:52.255,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 396 state to replica [ns_server:info,2014-08-19T16:53:52.256,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 395 state to replica [ns_server:info,2014-08-19T16:53:52.256,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 394 state to replica [ns_server:info,2014-08-19T16:53:52.256,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 393 state to replica [ns_server:info,2014-08-19T16:53:52.257,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 392 state to replica [ns_server:info,2014-08-19T16:53:52.257,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 391 state to replica [ns_server:info,2014-08-19T16:53:52.257,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 390 state to replica [ns_server:info,2014-08-19T16:53:52.257,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 389 state to replica [ns_server:info,2014-08-19T16:53:52.258,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 388 state to replica [ns_server:info,2014-08-19T16:53:52.258,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 387 state to replica [ns_server:info,2014-08-19T16:53:52.258,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 386 state to replica [ns_server:info,2014-08-19T16:53:52.258,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 385 state to replica [ns_server:info,2014-08-19T16:53:52.259,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 384 state to replica [ns_server:info,2014-08-19T16:53:52.259,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 383 state to replica [ns_server:info,2014-08-19T16:53:52.259,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 382 state to replica [ns_server:info,2014-08-19T16:53:52.260,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 381 state to replica [ns_server:info,2014-08-19T16:53:52.260,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 380 state to replica 
[ns_server:info,2014-08-19T16:53:52.260,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 379 state to replica [ns_server:info,2014-08-19T16:53:52.260,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 378 state to replica [ns_server:info,2014-08-19T16:53:52.261,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 377 state to replica [ns_server:info,2014-08-19T16:53:52.261,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 376 state to replica [ns_server:info,2014-08-19T16:53:52.261,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 375 state to replica [ns_server:info,2014-08-19T16:53:52.261,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 374 state to replica [ns_server:info,2014-08-19T16:53:52.262,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 373 state to replica [ns_server:info,2014-08-19T16:53:52.262,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 372 state to replica [ns_server:info,2014-08-19T16:53:52.262,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 371 state to replica [ns_server:info,2014-08-19T16:53:52.263,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 370 state to replica [ns_server:info,2014-08-19T16:53:52.263,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 369 state to replica [ns_server:info,2014-08-19T16:53:52.263,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 368 state to replica [ns_server:info,2014-08-19T16:53:52.263,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 367 state to replica [ns_server:info,2014-08-19T16:53:52.264,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 366 state to replica [ns_server:info,2014-08-19T16:53:52.264,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 365 state to replica [ns_server:info,2014-08-19T16:53:52.264,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 364 state to replica [ns_server:info,2014-08-19T16:53:52.265,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 363 state to replica [ns_server:info,2014-08-19T16:53:52.265,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 362 state to replica [ns_server:info,2014-08-19T16:53:52.265,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 361 state to replica [ns_server:info,2014-08-19T16:53:52.265,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 360 state to replica [ns_server:info,2014-08-19T16:53:52.266,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 359 state to replica [ns_server:info,2014-08-19T16:53:52.266,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 358 state to replica [ns_server:info,2014-08-19T16:53:52.266,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 357 state to replica [ns_server:info,2014-08-19T16:53:52.266,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 356 state to replica [ns_server:info,2014-08-19T16:53:52.267,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 355 state to replica 
[ns_server:info,2014-08-19T16:53:52.267,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 354 state to replica [ns_server:info,2014-08-19T16:53:52.267,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 353 state to replica [ns_server:info,2014-08-19T16:53:52.268,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 352 state to replica [ns_server:info,2014-08-19T16:53:52.268,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 351 state to replica [ns_server:info,2014-08-19T16:53:52.268,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 350 state to replica [ns_server:info,2014-08-19T16:53:52.268,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 349 state to replica [ns_server:info,2014-08-19T16:53:52.269,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 348 state to replica [ns_server:info,2014-08-19T16:53:52.269,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 347 state to replica [ns_server:info,2014-08-19T16:53:52.269,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 346 state to replica [ns_server:info,2014-08-19T16:53:52.270,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 345 state to replica [ns_server:info,2014-08-19T16:53:52.270,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 344 state to replica [ns_server:info,2014-08-19T16:53:52.270,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 343 state to replica [ns_server:info,2014-08-19T16:53:52.270,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 342 state to replica [ns_server:info,2014-08-19T16:53:52.271,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 170 state to replica [ns_server:info,2014-08-19T16:53:52.271,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 169 state to replica [ns_server:info,2014-08-19T16:53:52.271,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 168 state to replica [ns_server:info,2014-08-19T16:53:52.271,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 167 state to replica [ns_server:info,2014-08-19T16:53:52.272,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 166 state to replica [ns_server:info,2014-08-19T16:53:52.272,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 165 state to replica [ns_server:info,2014-08-19T16:53:52.272,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 164 state to replica [ns_server:info,2014-08-19T16:53:52.272,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 163 state to replica [ns_server:info,2014-08-19T16:53:52.273,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 162 state to replica [ns_server:info,2014-08-19T16:53:52.273,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 161 state to replica [ns_server:info,2014-08-19T16:53:52.273,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 160 state to replica [ns_server:info,2014-08-19T16:53:52.274,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 159 state to replica 
[ns_server:info,2014-08-19T16:53:52.274,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 158 state to replica [ns_server:info,2014-08-19T16:53:52.274,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 157 state to replica [ns_server:info,2014-08-19T16:53:52.274,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 156 state to replica [ns_server:info,2014-08-19T16:53:52.275,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 155 state to replica [ns_server:info,2014-08-19T16:53:52.275,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 154 state to replica [ns_server:info,2014-08-19T16:53:52.275,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 153 state to replica [ns_server:info,2014-08-19T16:53:52.275,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 152 state to replica [ns_server:info,2014-08-19T16:53:52.276,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 151 state to replica [ns_server:info,2014-08-19T16:53:52.276,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 150 state to replica [ns_server:info,2014-08-19T16:53:52.276,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 149 state to replica [ns_server:info,2014-08-19T16:53:52.277,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 148 state to replica [ns_server:info,2014-08-19T16:53:52.277,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 147 state to replica [ns_server:info,2014-08-19T16:53:52.277,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 146 state to replica [ns_server:info,2014-08-19T16:53:52.278,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 145 state to replica [ns_server:info,2014-08-19T16:53:52.278,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 144 state to replica [ns_server:info,2014-08-19T16:53:52.278,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 143 state to replica [ns_server:info,2014-08-19T16:53:52.278,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 142 state to replica [ns_server:info,2014-08-19T16:53:52.279,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 141 state to replica [ns_server:info,2014-08-19T16:53:52.279,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 140 state to replica [ns_server:info,2014-08-19T16:53:52.279,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 139 state to replica [ns_server:info,2014-08-19T16:53:52.279,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 138 state to replica [ns_server:info,2014-08-19T16:53:52.280,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 137 state to replica [ns_server:info,2014-08-19T16:53:52.280,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 136 state to replica [ns_server:info,2014-08-19T16:53:52.280,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 135 state to replica [ns_server:info,2014-08-19T16:53:52.280,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 134 state to replica 
[ns_server:info,2014-08-19T16:53:52.281,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 133 state to replica [ns_server:info,2014-08-19T16:53:52.281,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 132 state to replica [ns_server:info,2014-08-19T16:53:52.281,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 131 state to replica [ns_server:info,2014-08-19T16:53:52.282,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 130 state to replica [ns_server:info,2014-08-19T16:53:52.282,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 129 state to replica [ns_server:info,2014-08-19T16:53:52.282,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 128 state to replica [ns_server:info,2014-08-19T16:53:52.282,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 127 state to replica [ns_server:info,2014-08-19T16:53:52.283,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 126 state to replica [ns_server:info,2014-08-19T16:53:52.283,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 125 state to replica [ns_server:info,2014-08-19T16:53:52.283,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 124 state to replica [ns_server:info,2014-08-19T16:53:52.283,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 123 state to replica [ns_server:info,2014-08-19T16:53:52.284,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 122 state to replica [ns_server:info,2014-08-19T16:53:52.284,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 121 state to replica [ns_server:info,2014-08-19T16:53:52.284,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 120 state to replica [ns_server:info,2014-08-19T16:53:52.285,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 119 state to replica [ns_server:info,2014-08-19T16:53:52.285,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 118 state to replica [ns_server:info,2014-08-19T16:53:52.285,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 117 state to replica [ns_server:info,2014-08-19T16:53:52.286,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 116 state to replica [ns_server:info,2014-08-19T16:53:52.286,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 115 state to replica [ns_server:info,2014-08-19T16:53:52.286,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 114 state to replica [ns_server:info,2014-08-19T16:53:52.287,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 113 state to replica [ns_server:info,2014-08-19T16:53:52.287,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 112 state to replica [ns_server:info,2014-08-19T16:53:52.287,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 111 state to replica [ns_server:info,2014-08-19T16:53:52.287,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 110 state to replica [ns_server:info,2014-08-19T16:53:52.288,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 109 state to replica 
[ns_server:info,2014-08-19T16:53:52.288,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 108 state to replica [ns_server:info,2014-08-19T16:53:52.288,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 107 state to replica [ns_server:info,2014-08-19T16:53:52.288,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 106 state to replica [ns_server:info,2014-08-19T16:53:52.289,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 105 state to replica [ns_server:info,2014-08-19T16:53:52.289,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 104 state to replica [ns_server:info,2014-08-19T16:53:52.289,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 103 state to replica [ns_server:info,2014-08-19T16:53:52.289,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 102 state to replica [ns_server:info,2014-08-19T16:53:52.290,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 101 state to replica [ns_server:info,2014-08-19T16:53:52.290,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 100 state to replica [ns_server:info,2014-08-19T16:53:52.290,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 99 state to replica [ns_server:info,2014-08-19T16:53:52.290,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 98 state to replica [ns_server:info,2014-08-19T16:53:52.291,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 97 state to replica [ns_server:info,2014-08-19T16:53:52.291,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 96 state to replica [ns_server:info,2014-08-19T16:53:52.291,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 95 state to replica [ns_server:info,2014-08-19T16:53:52.292,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 94 state to replica [ns_server:info,2014-08-19T16:53:52.292,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 93 state to replica [ns_server:info,2014-08-19T16:53:52.292,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 92 state to replica [ns_server:info,2014-08-19T16:53:52.292,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 91 state to replica [ns_server:info,2014-08-19T16:53:52.293,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 90 state to replica [ns_server:info,2014-08-19T16:53:52.293,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 89 state to replica [ns_server:info,2014-08-19T16:53:52.293,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 88 state to replica [ns_server:info,2014-08-19T16:53:52.293,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 87 state to replica [ns_server:info,2014-08-19T16:53:52.294,ns_1@10.242.238.90:<0.4379.1>:ns_memcached:do_handle_call:527]Changed vbucket 86 state to replica [ns_server:debug,2014-08-19T16:53:52.310,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1023. Nacking mccouch update. [views:debug,2014-08-19T16:53:52.310,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1023. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:52.311,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1023] [ns_server:debug,2014-08-19T16:53:52.311,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1023,replica,0} [ns_server:info,2014-08-19T16:53:52.319,ns_1@10.242.238.90:tap_replication_manager-maps_1_8_tiles<0.4364.1>:tap_replication_manager:start_child:172]Starting replication from 'ns_1@10.242.238.91' for [938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956, 957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] [ns_server:info,2014-08-19T16:53:52.320,ns_1@10.242.238.90:tap_replication_manager-maps_1_8_tiles<0.4364.1>:tap_replication_manager:start_child:172]Starting replication from 'ns_1@10.242.238.89' for [342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360, 361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379, 380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426] [error_logger:info,2014-08-19T16:53:52.320,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-maps_1_8_tiles'} started: [{pid,<0.4532.1>}, {name, {new_child_id, [938,939,940,941,942,943,944,945,946,947,948, 949,950,951,952,953,954,955,956,957,958,959, 960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets, #Fun}, {username,"maps_1_8_tiles"}, {password,get_from_config}, {vbuckets, [938,939,940,941,942,943,944,945,946,947, 948,949,950,951,952,953,954,955,956,957, 958,959,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:53:52.322,ns_1@10.242.238.90:tap_replication_manager-maps_1_8_tiles<0.4364.1>:tap_replication_manager:start_child:172]Starting replication from 'ns_1@10.242.238.88' for [86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107, 108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126, 127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145, 146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164, 165,166,167,168,169,170] 
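Everything from the top of this capture down to the "Starting replication" entries above is highly repetitive: ns_memcached flips vbuckets 739 down through 512 to active and a long run of others to replica, after which tap_replication_manager announces one inbound stream per source node ('ns_1@10.242.238.91', 'ns_1@10.242.238.89', 'ns_1@10.242.238.88'). For offline triage it is usually easier to collapse those entries into per-state ranges and a per-source vbucket map. The Python sketch below is one minimal way to do that, under two assumptions that are not part of ns_server itself: the capture is available as plain text, and the "Changed vbucket ... state to ..." / "Starting replication from ... for [...]" wording is stable across the file.

```python
import re
from collections import defaultdict

# Wording taken from the entries above; treated as an assumption about this
# capture rather than a guaranteed ns_server log format.
STATE_RE = re.compile(r"Changed vbucket (\d+) state to (\w+)")
REPL_RE = re.compile(r"Starting replication from '([^']+)' for\s*\[([\d,\s]+)\]")

def vbucket_states(log_text):
    """Map state name -> sorted vbucket ids, e.g. {'active': [...], 'replica': [...]}."""
    states = defaultdict(set)
    for vb, state in STATE_RE.findall(log_text):
        states[state].add(int(vb))
    return {state: sorted(vbs) for state, vbs in states.items()}

def as_ranges(ids):
    """Collapse sorted ids into (start, end) runs so long floods read as a few ranges."""
    runs = []
    for vb in ids:
        if runs and vb == runs[-1][1] + 1:
            runs[-1] = (runs[-1][0], vb)
        else:
            runs.append((vb, vb))
    return runs

def replication_sources(log_text):
    """Map source node -> set of vbucket ids announced by tap_replication_manager."""
    sources = defaultdict(set)
    for node, vb_list in REPL_RE.findall(log_text):
        sources[node].update(int(v) for v in re.split(r"[\s,]+", vb_list.strip()) if v)
    return dict(sources)
```

As a usage sketch, `as_ranges(vbucket_states(text)["active"])` reduces the active flood above to a handful of contiguous ranges, and `replication_sources(text)` should report the same three source nodes and vbucket lists that the supervisor progress reports repeat below.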
[error_logger:info,2014-08-19T16:53:52.322,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-maps_1_8_tiles'} started: [{pid,<0.4533.1>}, {name, {new_child_id, [342,343,344,345,346,347,348,349,350,351,352, 353,354,355,356,357,358,359,360,361,362,363, 364,365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets, #Fun}, {username,"maps_1_8_tiles"}, {password,get_from_config}, {vbuckets, [342,343,344,345,346,347,348,349,350,351, 352,353,354,355,356,357,358,359,360,361, 362,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [error_logger:info,2014-08-19T16:53:52.323,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-maps_1_8_tiles'} started: [{pid,<0.4534.1>}, {name, {new_child_id, [86,87,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166, 167,168,169,170], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets, #Fun}, {username,"maps_1_8_tiles"}, {password,get_from_config}, {vbuckets, [86,87,88,89,90,91,92,93,94,95,96,97,98, 99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118, 119,120,121,122,123,124,125,126,127,128, 129,130,131,132,133,134,135,136,137,138, 139,140,141,142,143,144,145,146,147,148, 149,150,151,152,153,154,155,156,157,158, 159,160,161,162,163,164,165,166,167,168, 169,170]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:53:52.329,ns_1@10.242.238.90:ns_memcached-maps_1_8_tiles<0.4362.1>:ns_memcached:handle_call:247]Enabling traffic to bucket "maps_1_8_tiles" [ns_server:info,2014-08-19T16:53:52.329,ns_1@10.242.238.90:ns_memcached-maps_1_8_tiles<0.4362.1>:ns_memcached:handle_call:251]Bucket "maps_1_8_tiles" marked as warmed in 1 seconds [ns_server:debug,2014-08-19T16:53:52.344,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 562. Nacking mccouch update. [views:debug,2014-08-19T16:53:52.344,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/562. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:52.344,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",562,active,0} [ns_server:debug,2014-08-19T16:53:52.344,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950, 1014,754,722,690,658,626,594,562,988,956,1020,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578,972, 940,1004] [ns_server:debug,2014-08-19T16:53:52.375,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_ns_1@10.242.238.90 [ns_server:debug,2014-08-19T16:53:52.375,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:53:52.379,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357, 358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {checkpoints,[{342,0}, {343,0}, {344,0}, {345,0}, {346,0}, {347,0}, {348,0}, {349,0}, {350,0}, {351,0}, {352,0}, {353,0}, {354,0}, {355,0}, {356,0}, {357,0}, {358,0}, {359,0}, {360,0}, {361,0}, {362,0}, {363,0}, {364,0}, {365,0}, {366,0}, {367,0}, {368,0}, {369,0}, {370,0}, {371,0}, {372,0}, {373,0}, {374,0}, {375,0}, {376,0}, {377,0}, {378,0}, {379,0}, {380,0}, {381,0}, {382,0}, {383,0}, {384,0}, {385,0}, {386,0}, {387,0}, {388,0}, {389,0}, {390,0}, {391,0}, {392,0}, {393,0}, {394,0}, {395,0}, {396,0}, {397,0}, {398,0}, {399,0}, {400,0}, {401,0}, {402,0}, {403,0}, {404,0}, {405,0}, {406,0}, {407,0}, {408,0}, {409,0}, {410,0}, {411,0}, {412,0}, {413,0}, {414,0}, {415,0}, {416,0}, {417,0}, {418,0}, {419,0}, {420,0}, {421,0}, {422,0}, {423,0}, {424,0}, {425,0}, {426,0}]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] {{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets,#Fun}, {username,"maps_1_8_tiles"}, {password,get_from_config}, {vbuckets,[342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357, 358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]} [rebalance:info,2014-08-19T16:53:52.379,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 
138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169, 170]}, {checkpoints,[{86,0}, {87,0}, {88,0}, {89,0}, {90,0}, {91,0}, {92,0}, {93,0}, {94,0}, {95,0}, {96,0}, {97,0}, {98,0}, {99,0}, {100,0}, {101,0}, {102,0}, {103,0}, {104,0}, {105,0}, {106,0}, {107,0}, {108,0}, {109,0}, {110,0}, {111,0}, {112,0}, {113,0}, {114,0}, {115,0}, {116,0}, {117,0}, {118,0}, {119,0}, {120,0}, {121,0}, {122,0}, {123,0}, {124,0}, {125,0}, {126,0}, {127,0}, {128,0}, {129,0}, {130,0}, {131,0}, {132,0}, {133,0}, {134,0}, {135,0}, {136,0}, {137,0}, {138,0}, {139,0}, {140,0}, {141,0}, {142,0}, {143,0}, {144,0}, {145,0}, {146,0}, {147,0}, {148,0}, {149,0}, {150,0}, {151,0}, {152,0}, {153,0}, {154,0}, {155,0}, {156,0}, {157,0}, {158,0}, {159,0}, {160,0}, {161,0}, {162,0}, {163,0}, {164,0}, {165,0}, {166,0}, {167,0}, {168,0}, {169,0}, {170,0}]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets,#Fun}, {username,"maps_1_8_tiles"}, {password,get_from_config}, {vbuckets,[86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120, 121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136, 137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152, 153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168, 169,170]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]} [rebalance:debug,2014-08-19T16:53:52.380,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4535.1> [rebalance:debug,2014-08-19T16:53:52.381,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4536.1> [rebalance:info,2014-08-19T16:53:52.382,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 86 [rebalance:info,2014-08-19T16:53:52.382,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 87 [rebalance:info,2014-08-19T16:53:52.382,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 88 [rebalance:info,2014-08-19T16:53:52.382,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 89 [rebalance:info,2014-08-19T16:53:52.383,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 90 [rebalance:info,2014-08-19T16:53:52.383,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 91 [rebalance:info,2014-08-19T16:53:52.383,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 92 [rebalance:info,2014-08-19T16:53:52.383,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 93 [rebalance:info,2014-08-19T16:53:52.383,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 94 [rebalance:info,2014-08-19T16:53:52.383,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 95 [rebalance:info,2014-08-19T16:53:52.383,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 96 [rebalance:info,2014-08-19T16:53:52.383,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 342 
[rebalance:info,2014-08-19T16:53:52.383,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 97 [rebalance:info,2014-08-19T16:53:52.383,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 98 [rebalance:info,2014-08-19T16:53:52.383,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 343 [rebalance:info,2014-08-19T16:53:52.383,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 99 [rebalance:info,2014-08-19T16:53:52.384,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 100 [rebalance:info,2014-08-19T16:53:52.384,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 101 [rebalance:info,2014-08-19T16:53:52.384,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 344 [rebalance:info,2014-08-19T16:53:52.384,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 102 [rebalance:info,2014-08-19T16:53:52.384,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 103 [rebalance:info,2014-08-19T16:53:52.384,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 104 [rebalance:info,2014-08-19T16:53:52.384,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 345 [rebalance:info,2014-08-19T16:53:52.384,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 105 [ns_server:debug,2014-08-19T16:53:52.384,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:53:52.384,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 346 [rebalance:info,2014-08-19T16:53:52.384,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 106 [rebalance:info,2014-08-19T16:53:52.384,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 347 [rebalance:info,2014-08-19T16:53:52.385,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 107 [rebalance:info,2014-08-19T16:53:52.385,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 348 [rebalance:info,2014-08-19T16:53:52.385,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 108 [rebalance:info,2014-08-19T16:53:52.385,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 349 [rebalance:info,2014-08-19T16:53:52.385,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 109 [rebalance:info,2014-08-19T16:53:52.385,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 350 [rebalance:info,2014-08-19T16:53:52.385,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 110 [rebalance:info,2014-08-19T16:53:52.385,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 351 
[rebalance:info,2014-08-19T16:53:52.385,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 111 [rebalance:info,2014-08-19T16:53:52.385,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 352 [rebalance:info,2014-08-19T16:53:52.385,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 112 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 353 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 354 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 113 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 355 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 114 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 356 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 115 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 116 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 357 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 358 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 117 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 359 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 118 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 119 [rebalance:info,2014-08-19T16:53:52.386,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 360 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 120 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 361 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 121 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 122 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 362 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 123 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial 
stream for vbucket 363 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 124 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 364 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 125 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 365 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 126 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 366 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 127 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 128 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 367 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 129 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 368 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 130 [rebalance:info,2014-08-19T16:53:52.387,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953, 954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {checkpoints,[{938,0}, {939,0}, {940,0}, {941,0}, {942,0}, {943,0}, {944,0}, {945,0}, {946,0}, {947,0}, {948,0}, {949,0}, {950,0}, {951,0}, {952,0}, {953,0}, {954,0}, {955,0}, {956,0}, {957,0}, {958,0}, {959,0}, {960,0}, {961,0}, {962,0}, {963,0}, {964,0}, {965,0}, {966,0}, {967,0}, {968,0}, {969,0}, {970,0}, {971,0}, {972,0}, {973,0}, {974,0}, {975,0}, {976,0}, {977,0}, {978,0}, {979,0}, {980,0}, {981,0}, {982,0}, {983,0}, {984,0}, {985,0}, {986,0}, {987,0}, {988,0}, {989,0}, {990,0}, {991,0}, {992,0}, {993,0}, {994,0}, {995,0}, {996,0}, {997,0}, {998,0}, {999,0}, {1000,0}, {1001,0}, {1002,0}, {1003,0}, {1004,0}, {1005,0}, {1006,0}, {1007,0}, {1008,0}, {1009,0}, {1010,0}, {1011,0}, {1012,0}, {1013,0}, {1014,0}, {1015,0}, {1016,0}, {1017,0}, {1018,0}, {1019,0}, {1020,0}, {1021,0}, {1022,0}, {1023,0}]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] {{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets,#Fun}, {username,"maps_1_8_tiles"}, {password,get_from_config}, {vbuckets,[938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953, 954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000, 
1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]} [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 369 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 131 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 370 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 132 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 371 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 372 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 133 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 373 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 134 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 135 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 374 [rebalance:debug,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4537.1> [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 136 [rebalance:info,2014-08-19T16:53:52.388,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 375 [rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 376 [rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 137 [rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 138 [rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 377 [rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 139 [rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 140 [rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 378 [rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 141 [rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 379 
[rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 142 [rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 143 [rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 380 [rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 144 [rebalance:info,2014-08-19T16:53:52.389,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 381 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 145 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 382 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 146 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 383 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 938 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 147 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 384 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 939 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 148 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 385 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 940 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 386 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 149 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 941 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 387 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 942 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 150 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 388 [rebalance:info,2014-08-19T16:53:52.390,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 943 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial 
stream for vbucket 151 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 389 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 944 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 152 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 945 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 390 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 153 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 946 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 391 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 947 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 154 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 948 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 392 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 949 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 393 [rebalance:info,2014-08-19T16:53:52.391,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 155 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 950 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 394 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 156 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 951 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 157 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 395 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 952 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 158 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 396 
[rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 159 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 953 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 160 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 397 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 954 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 161 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 398 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 955 [rebalance:info,2014-08-19T16:53:52.392,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 162 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 163 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 399 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 956 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 164 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 400 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 957 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 165 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 401 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 958 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 166 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 402 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 959 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 403 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 167 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 404 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial 
stream for vbucket 960 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 168 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 405 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 961 [rebalance:info,2014-08-19T16:53:52.393,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 169 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 406 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 962 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 170 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 407 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 963 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 408 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 964 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 409 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 410 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 965 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 411 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 412 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 966 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 413 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 967 [rebalance:info,2014-08-19T16:53:52.394,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 414 [rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 968 [rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 415 [rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 969 [rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 416 
[rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 970 [rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 971 [rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 417 [rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 972 [rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 418 [rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 973 [rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 974 [rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 419 [rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 975 [rebalance:info,2014-08-19T16:53:52.395,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 420 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 976 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 421 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 977 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 978 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 422 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 979 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 980 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 423 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 981 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 424 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 982 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 425 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 983 [ns_server:debug,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial 
stream for vbucket 984 [rebalance:info,2014-08-19T16:53:52.396,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 426 [rebalance:info,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 985 [ns_server:debug,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 986 [ns_server:debug,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 987 [rebalance:info,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 988 [ns_server:debug,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 989 [ns_server:debug,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 990 [ns_server:debug,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 991 [ns_server:debug,2014-08-19T16:53:52.397,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 992 [rebalance:info,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 993 [ns_server:debug,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 994 [ns_server:debug,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 995 [ns_server:debug,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 996 [ns_server:debug,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen 
backfill-close message [rebalance:info,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 997 [ns_server:debug,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 998 [ns_server:debug,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 999 [ns_server:debug,2014-08-19T16:53:52.398,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1000 [ns_server:debug,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1001 [ns_server:debug,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1002 [ns_server:debug,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1003 [ns_server:debug,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1004 [ns_server:debug,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1005 [ns_server:debug,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1006 [ns_server:debug,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1007 [ns_server:debug,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen 
backfill-close message [ns_server:debug,2014-08-19T16:53:52.399,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1008 [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1009 [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1010 [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1011 [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1012 [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.400,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1013 [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen 
backfill-close message [rebalance:info,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1014 [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1015 [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1016 [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1017 [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1018 [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1019 [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.401,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1020 [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1021 [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen 
backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1022 [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1023 [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.402,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.403,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.403,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.403,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.403,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.403,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.403,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.403,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.403,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:53:52.403,ns_1@10.242.238.90:<0.4533.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.403,ns_1@10.242.238.90:<0.4534.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:53:52.405,ns_1@10.242.238.90:<0.4532.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [views:debug,2014-08-19T16:53:52.411,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1023. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:52.412,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1023,replica,0} [views:debug,2014-08-19T16:53:52.428,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/562.
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:52.428,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",562,active,0} [ns_server:debug,2014-08-19T16:53:52.604,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1022. Nacking mccouch update. [views:debug,2014-08-19T16:53:52.604,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1022. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:52.604,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1022,replica,0} [ns_server:debug,2014-08-19T16:53:52.604,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1022,1023] [ns_server:debug,2014-08-19T16:53:52.620,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 560. Nacking mccouch update. [views:debug,2014-08-19T16:53:52.621,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/560. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:52.621,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",560,active,0} [ns_server:debug,2014-08-19T16:53:52.621,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950, 1014,754,722,690,658,626,594,562,988,956,1020,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630, 598,566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578, 972,940,1004] [views:debug,2014-08-19T16:53:52.717,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1022. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:52.718,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1022,replica,0} [views:debug,2014-08-19T16:53:52.755,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/560. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:52.755,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",560,active,0} [ns_server:debug,2014-08-19T16:53:52.936,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1020. Nacking mccouch update. [views:debug,2014-08-19T16:53:52.936,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1020. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:52.936,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1020,replica,0} [ns_server:debug,2014-08-19T16:53:52.936,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1022,1020,1023] [ns_server:debug,2014-08-19T16:53:52.970,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 558. Nacking mccouch update. [views:debug,2014-08-19T16:53:52.970,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/558. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:52.970,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",558,active,0} [ns_server:debug,2014-08-19T16:53:52.970,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950, 1014,754,722,690,658,626,594,562,988,956,1020,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630, 598,566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578, 972,940,1004] [views:debug,2014-08-19T16:53:53.053,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1020. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:53.053,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1020,replica,0} [views:debug,2014-08-19T16:53:53.070,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/558. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:53.070,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",558,active,0} [ns_server:debug,2014-08-19T16:53:53.254,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1018. Nacking mccouch update. [views:debug,2014-08-19T16:53:53.254,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1018. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:53.254,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1018,replica,0} [ns_server:debug,2014-08-19T16:53:53.254,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1022,1018,1020,1023] [ns_server:debug,2014-08-19T16:53:53.271,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 556. Nacking mccouch update. 
[views:debug,2014-08-19T16:53:53.271,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/556. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:53.271,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",556,active,0} [ns_server:debug,2014-08-19T16:53:53.271,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,556,982, 950,1014,754,722,690,658,626,594,562,988,956,1020,760,744,728,712,696,680, 664,648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638, 606,574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618, 586,980,948,1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662, 630,598,566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610, 578,972,940,1004] [ns_server:info,2014-08-19T16:53:53.339,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.90': ["maps_1_8_tiles"] [views:debug,2014-08-19T16:53:53.355,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1018. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:53.355,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1018,replica,0} [views:debug,2014-08-19T16:53:53.372,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/556. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:53.372,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",556,active,0} [ns_server:debug,2014-08-19T16:53:53.580,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1016. Nacking mccouch update. [views:debug,2014-08-19T16:53:53.580,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1016. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:53.580,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1018,1020,1023] [ns_server:debug,2014-08-19T16:53:53.580,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1016,replica,0} [ns_server:debug,2014-08-19T16:53:53.597,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 554. Nacking mccouch update. [views:debug,2014-08-19T16:53:53.597,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/554. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:53.597,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",554,active,0} [ns_server:debug,2014-08-19T16:53:53.598,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,556,982, 950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632,600, 568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708,676, 644,612,580,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720, 688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732, 700,668,636,604,572,998,966,738,706,674,642,610,578,972,940,1004,744,712,680, 648,616,584,1023] [views:debug,2014-08-19T16:53:53.689,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1016. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:53.689,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1016,replica,0} [views:debug,2014-08-19T16:53:53.731,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/554. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:53.731,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",554,active,0} [ns_server:debug,2014-08-19T16:53:53.933,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1014. Nacking mccouch update. [views:debug,2014-08-19T16:53:53.933,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1014. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:53.933,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1014,replica,0} [ns_server:debug,2014-08-19T16:53:53.933,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1018,1014,1020,1023] [ns_server:debug,2014-08-19T16:53:53.991,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 552. Nacking mccouch update. [views:debug,2014-08-19T16:53:53.991,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/552. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:53.992,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",552,active,0} [ns_server:debug,2014-08-19T16:53:53.992,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,556,982, 950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632,600, 568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708,676, 644,612,580,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720, 688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732, 700,668,636,604,572,998,966,738,706,674,642,610,578,972,940,1004,744,712,680, 648,616,584,552,1023] [views:debug,2014-08-19T16:53:54.042,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1014. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:54.042,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1014,replica,0} [views:debug,2014-08-19T16:53:54.059,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/552. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:54.059,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",552,active,0} [ns_server:debug,2014-08-19T16:53:54.197,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1012. Nacking mccouch update. [views:debug,2014-08-19T16:53:54.197,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1012. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:54.197,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1012,replica,0} [ns_server:debug,2014-08-19T16:53:54.197,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1012,1018,1014,1020,1023] [ns_server:debug,2014-08-19T16:53:54.214,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 550. Nacking mccouch update. [views:debug,2014-08-19T16:53:54.214,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/550. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:54.214,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",550,active,0} [ns_server:debug,2014-08-19T16:53:54.215,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708, 676,644,612,580,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752, 720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764, 732,700,668,636,604,572,998,966,738,706,674,642,610,578,972,940,1004,744,712, 680,648,616,584,552,1023] [views:debug,2014-08-19T16:53:54.316,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1012. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:54.316,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1012,replica,0} [views:debug,2014-08-19T16:53:54.349,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/550. Updated state: active (0) [ns_server:debug,2014-08-19T16:53:54.349,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",550,active,0} [ns_server:debug,2014-08-19T16:53:54.500,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1010. Nacking mccouch update. [ns_server:debug,2014-08-19T16:53:54.500,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 548. Nacking mccouch update. [views:debug,2014-08-19T16:53:54.500,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1010. Updated state: replica (0) [views:debug,2014-08-19T16:53:54.500,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/548. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:53:54.500,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1012,1018,1014,1020,1023,1010] [ns_server:debug,2014-08-19T16:53:54.500,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1010,replica,0} [ns_server:debug,2014-08-19T16:53:54.501,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",548,active,0} [ns_server:debug,2014-08-19T16:53:54.501,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708, 676,644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012, 752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960, 764,732,700,668,636,604,572,998,966,738,706,674,642,610,578,972,940,1004,744, 712,680,648,616,584,552,1023] [views:debug,2014-08-19T16:53:54.567,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/548. Updated state: active (0) [views:debug,2014-08-19T16:53:54.568,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1010. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:54.568,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",548,active,0} [ns_server:debug,2014-08-19T16:53:54.568,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1010,replica,0} [ns_server:info,2014-08-19T16:53:54.770,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.89': ["maps_1_8_tiles"] [ns_server:debug,2014-08-19T16:53:54.810,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1008. Nacking mccouch update. [views:debug,2014-08-19T16:53:54.810,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1008. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:54.810,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1008,replica,0} [ns_server:debug,2014-08-19T16:53:54.810,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1012,1018,1008,1014,1020,1023,1010] [ns_server:debug,2014-08-19T16:53:54.852,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1021. Nacking mccouch update. [views:debug,2014-08-19T16:53:54.852,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1021. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:54.852,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1021,replica,0} [ns_server:debug,2014-08-19T16:53:54.852,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,550,1021,976,944,1008,748,716,684,652,620,588, 556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664, 632,600,568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740, 708,676,644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948, 1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566, 992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578,972,940, 1004,744,712,680,648,616,584,552,1023] [views:debug,2014-08-19T16:53:54.902,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1008. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:54.902,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1008,replica,0} [views:debug,2014-08-19T16:53:54.977,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1021. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:54.977,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1021,replica,0} [ns_server:debug,2014-08-19T16:53:55.085,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1006. Nacking mccouch update. [views:debug,2014-08-19T16:53:55.085,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1006. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:55.086,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1006,replica,0} [ns_server:debug,2014-08-19T16:53:55.086,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1006,1012,1018,1008,1014,1020,1023,1010] [views:debug,2014-08-19T16:53:55.119,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1006. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:55.119,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1006,replica,0} [ns_server:debug,2014-08-19T16:53:55.136,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1019. Nacking mccouch update. [views:debug,2014-08-19T16:53:55.136,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1019. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:55.136,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1019,replica,0} [ns_server:debug,2014-08-19T16:53:55.136,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,550,1021,976,944,1008,748,716,684,652,620,588, 556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664, 632,600,568,994,962,766,734,702,670,638,606,574,968,1000,740,708,676,644,612, 580,548,1019,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720, 688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732, 700,668,636,604,572,998,966,738,706,674,642,610,578,972,940,1004,744,712,680, 648,616,584,552,1023,978,946,1010] [views:debug,2014-08-19T16:53:55.212,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1019. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:55.212,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1019,replica,0} [ns_server:debug,2014-08-19T16:53:55.295,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1004. Nacking mccouch update. [views:debug,2014-08-19T16:53:55.295,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1004. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:55.295,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1004,replica,0} [ns_server:debug,2014-08-19T16:53:55.296,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1006,1012,1018,1008,1014,1020,1004,1023,1010] [views:debug,2014-08-19T16:53:55.354,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1004. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:55.355,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1004,replica,0} [ns_server:debug,2014-08-19T16:53:55.421,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1017. Nacking mccouch update. [views:debug,2014-08-19T16:53:55.421,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1017. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:55.421,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1017,replica,0} [ns_server:debug,2014-08-19T16:53:55.422,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,550,1021,976,944,1008,748,716,684,652,620,588, 556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664, 632,600,568,994,962,766,734,702,670,638,606,574,968,1000,740,708,676,644,612, 580,548,1019,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720, 688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732, 700,668,636,604,572,998,966,738,706,674,642,610,578,1017,972,940,1004,744, 712,680,648,616,584,552,1023,978,946,1010] [views:debug,2014-08-19T16:53:55.472,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1017. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:55.472,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1017,replica,0} [ns_server:debug,2014-08-19T16:53:55.569,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1002. Nacking mccouch update. [views:debug,2014-08-19T16:53:55.569,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1002. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:55.569,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1006,1012,1018,1002,1008,1014,1020,1004,1023,1010] [ns_server:debug,2014-08-19T16:53:55.569,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1002,replica,0} [views:debug,2014-08-19T16:53:55.670,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1002. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:55.670,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1002,replica,0} [ns_server:debug,2014-08-19T16:53:55.770,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1015. Nacking mccouch update. [views:debug,2014-08-19T16:53:55.770,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1015. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:55.770,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1015,replica,0} [ns_server:debug,2014-08-19T16:53:55.770,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,1015,970, 938,1002,742,710,678,646,614,582,550,1021,976,944,1008,748,716,684,652,620, 588,556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696, 664,632,600,568,994,962,766,734,702,670,638,606,574,968,1000,740,708,676,644, 612,580,548,1019,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752, 720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764, 732,700,668,636,604,572,998,966,738,706,674,642,610,578,1017,972,940,1004, 744,712,680,648,616,584,552,1023,978,946,1010] [views:debug,2014-08-19T16:53:55.854,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1015. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:55.854,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1015,replica,0} [ns_server:debug,2014-08-19T16:53:55.966,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1000. Nacking mccouch update. [views:debug,2014-08-19T16:53:55.966,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1000. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:55.966,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1000,replica,0} [ns_server:debug,2014-08-19T16:53:55.966,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,1012,1018,1002,1008,1014,1020,1004,1023,1010] [views:debug,2014-08-19T16:53:56.055,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1000. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.055,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1000,replica,0} [ns_server:debug,2014-08-19T16:53:56.155,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1013. Nacking mccouch update. [views:debug,2014-08-19T16:53:56.155,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1013. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.155,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1013,replica,0} [ns_server:debug,2014-08-19T16:53:56.156,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,1015,970, 938,1002,742,710,678,646,614,582,550,1021,976,944,1008,748,716,684,652,620, 588,556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696, 664,632,600,568,994,962,766,734,702,670,638,606,574,1013,968,1000,740,708, 676,644,612,580,548,1019,974,942,1006,746,714,682,650,618,586,554,980,948, 1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566, 992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578,1017,972, 940,1004,744,712,680,648,616,584,552,1023,978,946,1010] [views:debug,2014-08-19T16:53:56.239,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1013. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.239,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1013,replica,0} [ns_server:info,2014-08-19T16:53:56.292,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.88': ["maps_1_8_tiles"] [ns_server:debug,2014-08-19T16:53:56.338,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 998. Nacking mccouch update. [views:debug,2014-08-19T16:53:56.338,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/998. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.339,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",998,replica,0} [ns_server:debug,2014-08-19T16:53:56.339,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,1012,1018,1002,1008,998,1014,1020,1004,1023,1010] [views:debug,2014-08-19T16:53:56.389,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/998. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.389,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",998,replica,0} [ns_server:info,2014-08-19T16:53:56.440,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.91': ["maps_1_8_tiles"] [ns_server:debug,2014-08-19T16:53:56.464,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1011. Nacking mccouch update. [views:debug,2014-08-19T16:53:56.464,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1011. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.464,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1011,replica,0} [ns_server:debug,2014-08-19T16:53:56.465,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,1015,970, 938,1002,742,710,678,646,614,582,550,1021,976,944,1008,748,716,684,652,620, 588,556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696, 664,632,600,568,994,962,766,734,702,670,638,606,574,1013,968,1000,740,708, 676,644,612,580,548,1019,974,942,1006,746,714,682,650,618,586,554,980,948, 1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566, 992,960,764,732,700,668,636,604,572,1011,998,966,738,706,674,642,610,578, 1017,972,940,1004,744,712,680,648,616,584,552,1023,978,946,1010] [views:debug,2014-08-19T16:53:56.523,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1011. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.524,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1011,replica,0} [ns_server:debug,2014-08-19T16:53:56.582,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 996. Nacking mccouch update. [views:debug,2014-08-19T16:53:56.582,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/996. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.582,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,996,1012,1018,1002,1008,998,1014,1020,1004,1023,1010] [ns_server:debug,2014-08-19T16:53:56.582,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",996,replica,0} [views:debug,2014-08-19T16:53:56.634,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/996. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.634,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",996,replica,0} [ns_server:debug,2014-08-19T16:53:56.676,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1009. Nacking mccouch update. [views:debug,2014-08-19T16:53:56.676,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1009. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.676,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1009,replica,0} [ns_server:debug,2014-08-19T16:53:56.676,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,990,958,1022,762, 730,698,666,634,602,570,1009,996,964,736,704,672,640,608,576,1015,970,938, 1002,742,710,678,646,614,582,550,1021,976,944,1008,748,716,684,652,620,588, 556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664, 632,600,568,994,962,766,734,702,670,638,606,574,1013,968,1000,740,708,676, 644,612,580,548,1019,974,942,1006,746,714,682,650,618,586,554,980,948,1012, 752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960, 764,732,700,668,636,604,572,1011,998,966,738,706,674,642,610,578,1017,972, 940,1004,744,712,680,648,616,584,552,1023,978,946,1010,718,654,590] [views:debug,2014-08-19T16:53:56.726,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1009. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.726,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1009,replica,0} [ns_server:debug,2014-08-19T16:53:56.776,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 994. Nacking mccouch update. [views:debug,2014-08-19T16:53:56.776,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/994. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.776,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",994,replica,0} [ns_server:debug,2014-08-19T16:53:56.776,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,996,1012,1018,1002,1008,998,1014,1020,1004,1023,994,1010] [views:debug,2014-08-19T16:53:56.856,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/994. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.856,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",994,replica,0} [ns_server:debug,2014-08-19T16:53:56.956,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1007. Nacking mccouch update. [views:debug,2014-08-19T16:53:56.956,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1007. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:56.956,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1007,replica,0} [ns_server:debug,2014-08-19T16:53:56.957,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,990,958,1022,762, 730,698,666,634,602,570,1009,996,964,736,704,672,640,608,576,1015,970,938, 1002,742,710,678,646,614,582,550,1021,976,944,1008,748,716,684,652,620,588, 556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664, 632,600,568,1007,994,962,766,734,702,670,638,606,574,1013,968,1000,740,708, 676,644,612,580,548,1019,974,942,1006,746,714,682,650,618,586,554,980,948, 1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566, 992,960,764,732,700,668,636,604,572,1011,998,966,738,706,674,642,610,578, 1017,972,940,1004,744,712,680,648,616,584,552,1023,978,946,1010,718,654,590] [views:debug,2014-08-19T16:53:57.032,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1007. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:57.032,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1007,replica,0} [ns_server:debug,2014-08-19T16:53:57.092,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 992. Nacking mccouch update. [views:debug,2014-08-19T16:53:57.092,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/992. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:57.092,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",992,replica,0} [ns_server:debug,2014-08-19T16:53:57.092,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,996,1012,1018,1002,992,1008,998,1014,1020,1004,1023,994, 1010] [views:debug,2014-08-19T16:53:57.167,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/992. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:57.168,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",992,replica,0} [ns_server:debug,2014-08-19T16:53:57.284,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1005. Nacking mccouch update. [views:debug,2014-08-19T16:53:57.284,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1005. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:57.284,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1005,replica,0} [ns_server:debug,2014-08-19T16:53:57.285,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,990,958,1022,762, 730,698,666,634,602,570,1009,996,964,736,704,672,640,608,576,1015,970,938, 1002,742,710,678,646,614,582,550,1021,976,944,1008,748,716,684,652,620,588, 556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664, 632,600,568,1007,994,962,766,734,702,670,638,606,574,1013,968,1000,740,708, 676,644,612,580,548,1019,974,942,1006,746,714,682,650,618,586,554,980,948, 1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566, 1005,992,960,764,732,700,668,636,604,572,1011,998,966,738,706,674,642,610, 578,1017,972,940,1004,744,712,680,648,616,584,552,1023,978,946,1010,718,654, 590] [views:debug,2014-08-19T16:53:57.335,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1005. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:57.335,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1005,replica,0} [ns_server:debug,2014-08-19T16:53:57.433,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 990. Nacking mccouch update. [views:debug,2014-08-19T16:53:57.433,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/990. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:57.433,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",990,replica,0} [ns_server:debug,2014-08-19T16:53:57.433,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,990,1022,1006,996,1012,1018,1002,992,1008,998,1014,1020,1004,1023, 994,1010] [views:debug,2014-08-19T16:53:57.502,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/990. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:57.503,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",990,replica,0} [ns_server:debug,2014-08-19T16:53:57.610,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1003. Nacking mccouch update. [views:debug,2014-08-19T16:53:57.610,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1003. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:57.610,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1003,replica,0} [ns_server:debug,2014-08-19T16:53:57.610,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,1003,990,958,1022, 762,730,698,666,634,602,570,1009,996,964,736,704,672,640,608,576,1015,970, 938,1002,742,710,678,646,614,582,550,1021,976,944,1008,748,716,684,652,620, 588,556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696, 664,632,600,568,1007,994,962,766,734,702,670,638,606,574,1013,968,1000,740, 708,676,644,612,580,548,1019,974,942,1006,746,714,682,650,618,586,554,980, 948,1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598, 566,1005,992,960,764,732,700,668,636,604,572,1011,998,966,738,706,674,642, 610,578,1017,972,940,1004,744,712,680,648,616,584,552,1023,978,946,1010,718, 654,590] [views:debug,2014-08-19T16:53:57.677,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1003. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:57.677,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1003,replica,0} [ns_server:debug,2014-08-19T16:53:57.744,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 988. Nacking mccouch update. [views:debug,2014-08-19T16:53:57.744,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/988. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:57.744,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,990,1022,1006,996,1012,1018,1002,992,1008,998,1014,988,1020,1004, 1023,994,1010] [ns_server:debug,2014-08-19T16:53:57.744,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",988,replica,0} [views:debug,2014-08-19T16:53:57.828,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/988. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:57.828,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",988,replica,0} [ns_server:debug,2014-08-19T16:53:57.903,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1001. Nacking mccouch update. [views:debug,2014-08-19T16:53:57.903,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1001. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:57.903,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1001,replica,0} [ns_server:debug,2014-08-19T16:53:57.904,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,1003,990,958,1022, 762,730,698,666,634,602,570,1009,996,964,736,704,672,640,608,576,1015,970, 938,1002,742,710,678,646,614,582,550,1021,976,944,1008,748,716,684,652,620, 588,556,982,950,1014,754,722,690,658,626,594,562,1001,988,956,1020,760,728, 696,664,632,600,568,1007,994,962,766,734,702,670,638,606,574,1013,968,1000, 740,708,676,644,612,580,548,1019,974,942,1006,746,714,682,650,618,586,554, 980,948,1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630, 598,566,1005,992,960,764,732,700,668,636,604,572,1011,998,966,738,706,674, 642,610,578,1017,972,940,1004,744,712,680,648,616,584,552,1023,978,946,1010, 718,654,590] [views:debug,2014-08-19T16:53:57.979,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/1001. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:57.979,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",1001,replica,0} [ns_server:debug,2014-08-19T16:53:58.054,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 986. Nacking mccouch update. [views:debug,2014-08-19T16:53:58.054,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/986. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:58.054,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",986,replica,0} [ns_server:debug,2014-08-19T16:53:58.054,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,990,1022,1006,996,1012,986,1018,1002,992,1008,998,1014,988,1020, 1004,1023,994,1010] [views:debug,2014-08-19T16:53:58.155,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/986. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:58.155,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",986,replica,0} [ns_server:debug,2014-08-19T16:53:58.255,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 999. Nacking mccouch update. [views:debug,2014-08-19T16:53:58.255,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/999. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:58.255,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",999,replica,0} [ns_server:debug,2014-08-19T16:53:58.255,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,756,724,692,660,628,596,564,1003,990,958,1022,762,730, 698,666,634,602,570,1009,996,964,736,704,672,640,608,576,1015,970,938,1002, 742,710,678,646,614,582,550,1021,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,1001,988,956,1020,760,728,696,664, 632,600,568,1007,994,962,766,734,702,670,638,606,574,1013,968,1000,740,708, 676,644,612,580,548,1019,974,942,1006,746,714,682,650,618,586,554,980,948, 1012,999,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598, 566,1005,992,960,764,732,700,668,636,604,572,1011,998,966,738,706,674,642, 610,578,1017,972,940,1004,744,712,680,648,616,584,552,1023,978,946,1010,718, 654,590,952,1016] [views:debug,2014-08-19T16:53:58.328,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/999. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:58.328,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",999,replica,0} [ns_server:debug,2014-08-19T16:53:58.428,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 984. Nacking mccouch update. [views:debug,2014-08-19T16:53:58.428,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/984. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:58.428,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",984,replica,0} [ns_server:debug,2014-08-19T16:53:58.428,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,1012,986,1018,1002,992,1008,998,1014,988, 1020,1004,1023,994,1010] [views:debug,2014-08-19T16:53:58.512,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/984. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:58.512,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",984,replica,0} [ns_server:debug,2014-08-19T16:53:58.596,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 997. Nacking mccouch update. [views:debug,2014-08-19T16:53:58.596,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/997. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:58.596,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",997,replica,0} [ns_server:debug,2014-08-19T16:53:58.596,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,756,724,692,660,628,596,564,1003,990,958,1022,762, 730,698,666,634,602,570,1009,996,964,736,704,672,640,608,576,1015,970,938, 1002,742,710,678,646,614,582,550,1021,976,944,1008,748,716,684,652,620,588, 556,982,950,1014,754,722,690,658,626,594,562,1001,988,956,1020,760,728,696, 664,632,600,568,1007,994,962,766,734,702,670,638,606,574,1013,968,1000,740, 708,676,644,612,580,548,1019,974,942,1006,746,714,682,650,618,586,554,980, 948,1012,999,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630, 598,566,1005,992,960,764,732,700,668,636,604,572,1011,998,966,738,706,674, 642,610,578,1017,972,940,1004,744,712,680,648,616,584,552,1023,978,946,1010, 718,654,590,952,1016] [views:debug,2014-08-19T16:53:58.680,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/997. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:58.680,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",997,replica,0} [ns_server:debug,2014-08-19T16:53:58.746,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 982. Nacking mccouch update. [views:debug,2014-08-19T16:53:58.747,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/982. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:58.747,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",982,replica,0} [ns_server:debug,2014-08-19T16:53:58.747,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,1012,986,1018,1002,992,1008,998,982,1014,988, 1020,1004,1023,994,1010] [views:debug,2014-08-19T16:53:58.830,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/982. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:58.831,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",982,replica,0} [ns_server:debug,2014-08-19T16:53:58.881,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 995. Nacking mccouch update. [views:debug,2014-08-19T16:53:58.881,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/995. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:58.881,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",995,replica,0} [ns_server:debug,2014-08-19T16:53:58.881,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,756,724,692,660,628,596,564,1003,990,958,1022,762, 730,698,666,634,602,570,1009,996,964,736,704,672,640,608,576,1015,970,938, 1002,742,710,678,646,614,582,550,1021,976,944,1008,995,748,716,684,652,620, 588,556,982,950,1014,754,722,690,658,626,594,562,1001,988,956,1020,760,728, 696,664,632,600,568,1007,994,962,766,734,702,670,638,606,574,1013,968,1000, 740,708,676,644,612,580,548,1019,974,942,1006,746,714,682,650,618,586,554, 980,948,1012,999,752,720,688,656,624,592,560,986,954,1018,758,726,694,662, 630,598,566,1005,992,960,764,732,700,668,636,604,572,1011,998,966,738,706, 674,642,610,578,1017,972,940,1004,744,712,680,648,616,584,552,1023,978,946, 1010,718,654,590,952,1016] [views:debug,2014-08-19T16:53:58.956,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/995. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:58.956,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",995,replica,0} [ns_server:debug,2014-08-19T16:53:59.088,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 980. Nacking mccouch update. [views:debug,2014-08-19T16:53:59.089,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/980. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:59.089,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",980,replica,0} [ns_server:debug,2014-08-19T16:53:59.089,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,980,1012,986,1018,1002,992,1008,998,982,1014, 988,1020,1004,1023,994,1010] [views:debug,2014-08-19T16:53:59.139,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/980. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:59.139,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",980,replica,0} [ns_server:debug,2014-08-19T16:53:59.231,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 993. Nacking mccouch update. [views:debug,2014-08-19T16:53:59.231,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/993. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:59.231,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",993,replica,0} [ns_server:debug,2014-08-19T16:53:59.232,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,756,724,692,660,628,596,564,1003,990,958,1022,762, 730,698,666,634,602,570,1009,996,964,736,704,672,640,608,576,1015,970,938, 1002,742,710,678,646,614,582,550,1021,976,944,1008,995,748,716,684,652,620, 588,556,982,950,1014,754,722,690,658,626,594,562,1001,988,956,1020,760,728, 696,664,632,600,568,1007,994,962,766,734,702,670,638,606,574,1013,968,1000, 740,708,676,644,612,580,548,1019,974,942,1006,993,746,714,682,650,618,586, 554,980,948,1012,999,752,720,688,656,624,592,560,986,954,1018,758,726,694, 662,630,598,566,1005,992,960,764,732,700,668,636,604,572,1011,998,966,738, 706,674,642,610,578,1017,972,940,1004,744,712,680,648,616,584,552,1023,978, 946,1010,718,654,590,952,1016] [views:debug,2014-08-19T16:53:59.315,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/993. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:59.316,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",993,replica,0} [ns_server:debug,2014-08-19T16:53:59.407,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 978. Nacking mccouch update. [views:debug,2014-08-19T16:53:59.407,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/978. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:59.407,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",978,replica,0} [ns_server:debug,2014-08-19T16:53:59.407,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,980,1012,986,1018,1002,992,1008,998,982,1014, 988,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:53:59.474,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/978. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:59.475,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",978,replica,0} [ns_server:debug,2014-08-19T16:53:59.583,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 991. Nacking mccouch update. [views:debug,2014-08-19T16:53:59.583,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/991. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:59.583,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",991,replica,0} [ns_server:debug,2014-08-19T16:53:59.584,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,756,724,692,660,628,596,564,1003,990,958,1022,762, 730,698,666,634,602,570,1009,996,964,736,704,672,640,608,576,1015,970,938, 1002,742,710,678,646,614,582,550,1021,976,944,1008,995,748,716,684,652,620, 588,556,982,950,1014,754,722,690,658,626,594,562,1001,988,956,1020,760,728, 696,664,632,600,568,1007,994,962,766,734,702,670,638,606,574,1013,968,1000, 740,708,676,644,612,580,548,1019,974,942,1006,993,746,714,682,650,618,586, 554,980,948,1012,999,752,720,688,656,624,592,560,986,954,1018,758,726,694, 662,630,598,566,1005,992,960,764,732,700,668,636,604,572,1011,998,966,738, 706,674,642,610,578,1017,972,940,1004,991,744,712,680,648,616,584,552,1023, 978,946,1010,718,654,590,952,1016] [views:debug,2014-08-19T16:53:59.666,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/991. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:59.667,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",991,replica,0} [ns_server:debug,2014-08-19T16:53:59.756,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 976. Nacking mccouch update. [views:debug,2014-08-19T16:53:59.756,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/976. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:59.756,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",976,replica,0} [ns_server:debug,2014-08-19T16:53:59.756,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,980,1012,986,1018,1002,992,976,1008,998,982, 1014,988,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:53:59.823,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/976. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:59.824,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",976,replica,0} [ns_server:debug,2014-08-19T16:53:59.873,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 989. Nacking mccouch update. [views:debug,2014-08-19T16:53:59.874,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/989. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:59.874,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",989,replica,0} [ns_server:debug,2014-08-19T16:53:59.874,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,724,660,596,990,958,1022,762,730,698,666,634,602,570, 1009,996,964,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678,646, 614,582,550,1021,976,944,1008,995,748,716,684,652,620,588,556,982,950,1014, 754,722,690,658,626,594,562,1001,988,956,1020,760,728,696,664,632,600,568, 1007,994,962,766,734,702,670,638,606,574,1013,968,1000,740,708,676,644,612, 580,548,1019,974,942,1006,993,746,714,682,650,618,586,554,980,948,1012,999, 752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,1005, 992,960,764,732,700,668,636,604,572,1011,998,966,738,706,674,642,610,578, 1017,972,940,1004,991,744,712,680,648,616,584,552,1023,978,946,1010,718,654, 590,952,1016,756,692,628,564,1003] [views:debug,2014-08-19T16:53:59.924,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/989. Updated state: replica (0) [ns_server:debug,2014-08-19T16:53:59.924,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",989,replica,0} [ns_server:debug,2014-08-19T16:54:00.008,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 974. Nacking mccouch update. [views:debug,2014-08-19T16:54:00.008,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/974. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:00.008,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",974,replica,0} [ns_server:debug,2014-08-19T16:54:00.008,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,974,1022,1006,996,980,1012,986,1018,1002,992,976,1008,998, 982,1014,988,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:54:00.083,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/974. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:00.084,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",974,replica,0} [ns_server:debug,2014-08-19T16:54:00.135,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 987. Nacking mccouch update. [views:debug,2014-08-19T16:54:00.135,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/987. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:00.135,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",987,replica,0} [ns_server:debug,2014-08-19T16:54:00.135,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,724,660,596,990,958,1022,762,730,698,666,634,602,570, 1009,996,964,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678,646, 614,582,550,1021,976,944,1008,995,748,716,684,652,620,588,556,982,950,1014, 754,722,690,658,626,594,562,1001,988,956,1020,760,728,696,664,632,600,568, 1007,994,962,766,734,702,670,638,606,574,1013,968,1000,987,740,708,676,644, 612,580,548,1019,974,942,1006,993,746,714,682,650,618,586,554,980,948,1012, 999,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566, 1005,992,960,764,732,700,668,636,604,572,1011,998,966,738,706,674,642,610, 578,1017,972,940,1004,991,744,712,680,648,616,584,552,1023,978,946,1010,718, 654,590,952,1016,756,692,628,564,1003] [views:debug,2014-08-19T16:54:00.186,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/987. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:00.186,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",987,replica,0} [ns_server:debug,2014-08-19T16:54:00.252,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 972. Nacking mccouch update. [views:debug,2014-08-19T16:54:00.252,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/972. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:00.253,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",972,replica,0} [ns_server:debug,2014-08-19T16:54:00.253,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,974,1022,1006,996,980,1012,986,1018,1002,992,976,1008,998, 982,1014,988,972,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:54:00.327,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/972. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:00.327,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",972,replica,0} [ns_server:debug,2014-08-19T16:54:00.427,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 985. Nacking mccouch update. [views:debug,2014-08-19T16:54:00.427,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/985. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:00.427,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",985,replica,0} [ns_server:debug,2014-08-19T16:54:00.427,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,724,660,596,990,958,1022,762,730,698,666,634,602,570, 1009,996,964,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678,646, 614,582,550,1021,976,944,1008,995,748,716,684,652,620,588,556,982,950,1014, 754,722,690,658,626,594,562,1001,988,956,1020,760,728,696,664,632,600,568, 1007,994,962,766,734,702,670,638,606,574,1013,968,1000,987,740,708,676,644, 612,580,548,1019,974,942,1006,993,746,714,682,650,618,586,554,980,948,1012, 999,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566, 1005,992,960,764,732,700,668,636,604,572,1011,998,966,985,738,706,674,642, 610,578,1017,972,940,1004,991,744,712,680,648,616,584,552,1023,978,946,1010, 718,654,590,952,1016,756,692,628,564,1003] [views:debug,2014-08-19T16:54:00.528,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/985. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:00.528,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",985,replica,0} [ns_server:debug,2014-08-19T16:54:00.636,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 970. Nacking mccouch update. [views:debug,2014-08-19T16:54:00.636,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/970. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:00.636,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",970,replica,0} [ns_server:debug,2014-08-19T16:54:00.636,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,974,1022,1006,996,980,1012,986,970,1018,1002,992,976,1008, 998,982,1014,988,972,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:54:00.704,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/970. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:00.704,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",970,replica,0} [ns_server:debug,2014-08-19T16:54:00.804,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 983. Nacking mccouch update. [views:debug,2014-08-19T16:54:00.804,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/983. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:00.804,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",983,replica,0} [ns_server:debug,2014-08-19T16:54:00.804,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,724,660,596,990,958,1022,762,730,698,666,634,602,570, 1009,996,964,983,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678, 646,614,582,550,1021,976,944,1008,995,748,716,684,652,620,588,556,982,950, 1014,754,722,690,658,626,594,562,1001,988,956,1020,760,728,696,664,632,600, 568,1007,994,962,766,734,702,670,638,606,574,1013,968,1000,987,740,708,676, 644,612,580,548,1019,974,942,1006,993,746,714,682,650,618,586,554,980,948, 1012,999,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598, 566,1005,992,960,764,732,700,668,636,604,572,1011,998,966,985,738,706,674, 642,610,578,1017,972,940,1004,991,744,712,680,648,616,584,552,1023,978,946, 1010,718,654,590,952,1016,756,692,628,564,1003] [views:debug,2014-08-19T16:54:00.871,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/983. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:00.871,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",983,replica,0} [ns_server:debug,2014-08-19T16:54:00.964,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 968. Nacking mccouch update. [views:debug,2014-08-19T16:54:00.965,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/968. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:00.965,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",968,replica,0} [ns_server:debug,2014-08-19T16:54:00.965,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,1012,986,970,1018,1002,992,976, 1008,998,982,1014,988,972,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:54:01.022,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/968. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:01.022,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",968,replica,0} [ns_server:debug,2014-08-19T16:54:01.119,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 981. Nacking mccouch update. [views:debug,2014-08-19T16:54:01.119,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/981. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:01.120,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",981,replica,0} [ns_server:debug,2014-08-19T16:54:01.120,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,724,660,596,990,958,1022,762,730,698,666,634,602,570, 1009,996,964,983,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678, 646,614,582,550,1021,976,944,1008,995,748,716,684,652,620,588,556,982,950, 1014,754,722,690,658,626,594,562,1001,988,956,1020,760,728,696,664,632,600, 568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,740,708, 676,644,612,580,548,1019,974,942,1006,993,746,714,682,650,618,586,554,980, 948,1012,999,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630, 598,566,1005,992,960,764,732,700,668,636,604,572,1011,998,966,985,738,706, 674,642,610,578,1017,972,940,1004,991,744,712,680,648,616,584,552,1023,978, 946,1010,718,654,590,952,1016,756,692,628,564,1003] [views:debug,2014-08-19T16:54:01.187,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/981. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:01.187,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",981,replica,0} [ns_server:debug,2014-08-19T16:54:01.254,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 966. Nacking mccouch update. [views:debug,2014-08-19T16:54:01.254,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/966. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:01.254,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",966,replica,0} [ns_server:debug,2014-08-19T16:54:01.254,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,1012,986,970,1018,1002,992,976, 1008,998,982,966,1014,988,972,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:54:01.321,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/966. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:01.321,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",966,replica,0} [ns_server:debug,2014-08-19T16:54:01.371,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 979. Nacking mccouch update. [views:debug,2014-08-19T16:54:01.371,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/979. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:01.371,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",979,replica,0} [ns_server:debug,2014-08-19T16:54:01.372,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,724,660,596,958,1022,762,730,698,666,634,602,570, 1009,996,964,983,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678, 646,614,582,550,1021,976,944,1008,995,748,716,684,652,620,588,556,982,950, 1014,754,722,690,658,626,594,562,1001,988,956,1020,760,728,696,664,632,600, 568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,740,708, 676,644,612,580,548,1019,974,942,1006,993,746,714,682,650,618,586,554,980, 948,1012,999,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630, 598,566,1005,992,960,979,764,732,700,668,636,604,572,1011,998,966,985,738, 706,674,642,610,578,1017,972,940,1004,991,744,712,680,648,616,584,552,1023, 978,946,1010,718,654,590,952,1016,756,692,628,564,1003,990] [views:debug,2014-08-19T16:54:01.547,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/979. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:01.547,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",979,replica,0} [ns_server:debug,2014-08-19T16:54:01.605,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 964. Nacking mccouch update. [views:debug,2014-08-19T16:54:01.605,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/964. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:01.605,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",964,replica,0} [ns_server:debug,2014-08-19T16:54:01.606,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,964,1012,986,970,1018,1002,992, 976,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:54:01.673,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/964. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:01.673,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",964,replica,0} [ns_server:debug,2014-08-19T16:54:01.771,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 977. Nacking mccouch update. [views:debug,2014-08-19T16:54:01.772,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/977. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:01.772,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",977,replica,0} [ns_server:debug,2014-08-19T16:54:01.772,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,724,660,596,958,1022,977,762,730,698,666,634,602,570, 1009,996,964,983,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678, 646,614,582,550,1021,976,944,1008,995,748,716,684,652,620,588,556,982,950, 1014,754,722,690,658,626,594,562,1001,988,956,1020,760,728,696,664,632,600, 568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,740,708, 676,644,612,580,548,1019,974,942,1006,993,746,714,682,650,618,586,554,980, 948,1012,999,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630, 598,566,1005,992,960,979,764,732,700,668,636,604,572,1011,998,966,985,738, 706,674,642,610,578,1017,972,940,1004,991,744,712,680,648,616,584,552,1023, 978,946,1010,718,654,590,952,1016,756,692,628,564,1003,990] [views:debug,2014-08-19T16:54:01.872,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/977. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:01.872,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",977,replica,0} [ns_server:debug,2014-08-19T16:54:01.947,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 962. Nacking mccouch update. [views:debug,2014-08-19T16:54:01.947,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/962. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:01.948,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,964,1012,986,970,1018,1002,992, 976,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,962,1010] [ns_server:debug,2014-08-19T16:54:01.948,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",962,replica,0} [views:debug,2014-08-19T16:54:02.023,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/962. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:02.023,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",962,replica,0} [ns_server:debug,2014-08-19T16:54:02.098,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 975. Nacking mccouch update. [views:debug,2014-08-19T16:54:02.098,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/975. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:02.098,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",975,replica,0} [ns_server:debug,2014-08-19T16:54:02.099,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,724,660,596,958,1022,977,762,730,698,666,634,602,570, 1009,996,964,983,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678, 646,614,582,550,1021,976,944,1008,995,748,716,684,652,620,588,556,982,950, 1014,754,722,690,658,626,594,562,1001,988,956,1020,975,760,728,696,664,632, 600,568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,740, 708,676,644,612,580,548,1019,974,942,1006,993,746,714,682,650,618,586,554, 980,948,1012,999,752,720,688,656,624,592,560,986,954,1018,758,726,694,662, 630,598,566,1005,992,960,979,764,732,700,668,636,604,572,1011,998,966,985, 738,706,674,642,610,578,1017,972,940,1004,991,744,712,680,648,616,584,552, 1023,978,946,1010,718,654,590,952,1016,756,692,628,564,1003,990] [views:debug,2014-08-19T16:54:02.199,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/975. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:02.199,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",975,replica,0} [ns_server:debug,2014-08-19T16:54:02.316,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 960. Nacking mccouch update. [views:debug,2014-08-19T16:54:02.316,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/960. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:02.316,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",960,replica,0} [ns_server:debug,2014-08-19T16:54:02.316,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,964,1012,986,970,1018,1002,992, 976,960,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,962,1010] [views:debug,2014-08-19T16:54:02.391,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/960. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:02.392,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",960,replica,0} [ns_server:debug,2014-08-19T16:54:02.473,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 973. Nacking mccouch update. [views:debug,2014-08-19T16:54:02.473,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/973. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:02.473,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",973,replica,0} [ns_server:debug,2014-08-19T16:54:02.473,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,724,660,596,958,1022,977,762,730,698,666,634,602,570, 1009,996,964,983,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678, 646,614,582,550,1021,976,944,1008,995,748,716,684,652,620,588,556,982,950, 1014,754,722,690,658,626,594,562,1001,988,956,1020,975,760,728,696,664,632, 600,568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,740, 708,676,644,612,580,548,1019,974,942,1006,993,746,714,682,650,618,586,554, 980,948,1012,999,752,720,688,656,624,592,560,986,954,1018,973,758,726,694, 662,630,598,566,1005,992,960,979,764,732,700,668,636,604,572,1011,998,966, 985,738,706,674,642,610,578,1017,972,940,1004,991,744,712,680,648,616,584, 552,1023,978,946,1010,718,654,590,952,1016,756,692,628,564,1003,990] [views:debug,2014-08-19T16:54:02.548,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/973. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:02.549,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",973,replica,0} [ns_server:debug,2014-08-19T16:54:02.632,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 958. Nacking mccouch update. [views:debug,2014-08-19T16:54:02.632,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/958. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:02.632,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,1018,1002, 992,976,960,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,962,1010] [ns_server:debug,2014-08-19T16:54:02.632,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",958,replica,0} [views:debug,2014-08-19T16:54:02.724,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/958. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:02.725,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",958,replica,0} [ns_server:debug,2014-08-19T16:54:02.824,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 971. Nacking mccouch update. [views:debug,2014-08-19T16:54:02.824,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/971. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:02.824,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",971,replica,0} [ns_server:debug,2014-08-19T16:54:02.825,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,977,762,730,698,666,634,602, 570,1009,996,964,983,736,704,672,640,608,576,1015,970,938,1002,989,742,710, 678,646,614,582,550,1021,976,944,1008,995,748,716,684,652,620,588,556,982, 950,1014,754,722,690,658,626,594,562,1001,988,956,1020,975,760,728,696,664, 632,600,568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987, 740,708,676,644,612,580,548,1019,974,942,1006,993,746,714,682,650,618,586, 554,980,948,1012,999,752,720,688,656,624,592,560,986,954,1018,973,758,726, 694,662,630,598,566,1005,992,960,979,764,732,700,668,636,604,572,1011,998, 966,985,738,706,674,642,610,578,1017,972,940,1004,991,744,712,680,648,616, 584,552,1023,978,946,1010,718,654,590,952,1016,756,692,628,564,1003,990] [views:debug,2014-08-19T16:54:02.900,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/971. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:02.900,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",971,replica,0} [ns_server:debug,2014-08-19T16:54:03.000,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 956. Nacking mccouch update. [views:debug,2014-08-19T16:54:03.000,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/956. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:03.001,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,1018,1002, 992,976,960,1008,998,982,966,1014,988,972,956,1020,1004,1023,994,978,962, 1010] [ns_server:debug,2014-08-19T16:54:03.001,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",956,replica,0} [views:debug,2014-08-19T16:54:03.051,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/956. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:03.051,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",956,replica,0} [ns_server:debug,2014-08-19T16:54:03.194,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 969. Nacking mccouch update. [views:debug,2014-08-19T16:54:03.194,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/969. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:03.194,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",969,replica,0} [ns_server:debug,2014-08-19T16:54:03.195,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,762,698,634,570,1009,996, 964,983,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678,646,614, 582,550,1021,976,944,1008,995,748,716,684,652,620,588,556,982,950,1014,969, 754,722,690,658,626,594,562,1001,988,956,1020,975,760,728,696,664,632,600, 568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,740,708, 676,644,612,580,548,1019,974,942,1006,993,746,714,682,650,618,586,554,980, 948,1012,999,752,720,688,656,624,592,560,986,954,1018,973,758,726,694,662, 630,598,566,1005,992,960,979,764,732,700,668,636,604,572,1011,998,966,985, 738,706,674,642,610,578,1017,972,940,1004,991,744,712,680,648,616,584,552, 1023,978,946,1010,718,654,590,952,1016,756,692,628,564,1003,990,977,730,666, 602] [views:debug,2014-08-19T16:54:03.250,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/969. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:03.250,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",969,replica,0} [ns_server:debug,2014-08-19T16:54:03.342,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 954. Nacking mccouch update. [views:debug,2014-08-19T16:54:03.342,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/954. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:03.342,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",954,replica,0} [ns_server:debug,2014-08-19T16:54:03.343,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,954,1018, 1002,992,976,960,1008,998,982,966,1014,988,972,956,1020,1004,1023,994,978, 962,1010] [views:debug,2014-08-19T16:54:03.411,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/954. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:03.411,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",954,replica,0} [ns_server:debug,2014-08-19T16:54:03.494,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 967. Nacking mccouch update. [views:debug,2014-08-19T16:54:03.494,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/967. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:03.494,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",967,replica,0} [ns_server:debug,2014-08-19T16:54:03.495,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,762,698,634,570,1009,996, 964,983,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678,646,614, 582,550,1021,976,944,1008,995,748,716,684,652,620,588,556,982,950,1014,969, 754,722,690,658,626,594,562,1001,988,956,1020,975,760,728,696,664,632,600, 568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,740,708, 676,644,612,580,548,1019,974,942,1006,993,746,714,682,650,618,586,554,980, 948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,758,726,694, 662,630,598,566,1005,992,960,979,764,732,700,668,636,604,572,1011,998,966, 985,738,706,674,642,610,578,1017,972,940,1004,991,744,712,680,648,616,584, 552,1023,978,946,1010,718,654,590,952,1016,756,692,628,564,1003,990,977,730, 666,602] [views:debug,2014-08-19T16:54:03.595,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/967. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:03.595,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",967,replica,0} [ns_server:debug,2014-08-19T16:54:03.695,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 952. Nacking mccouch update. [views:debug,2014-08-19T16:54:03.695,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/952. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:03.695,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",952,replica,0} [ns_server:debug,2014-08-19T16:54:03.695,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,1014,988,972,956,1020,1004,1023,994, 978,962,1010] [views:debug,2014-08-19T16:54:03.746,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/952. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:03.746,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",952,replica,0} [ns_server:debug,2014-08-19T16:54:03.854,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 965. Nacking mccouch update. [views:debug,2014-08-19T16:54:03.854,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/965. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:03.855,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",965,replica,0} [ns_server:debug,2014-08-19T16:54:03.855,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,762,698,634,570,1009,996, 964,983,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678,646,614, 582,550,1021,976,944,1008,995,748,716,684,652,620,588,556,982,950,1014,969, 754,722,690,658,626,594,562,1001,988,956,1020,975,760,728,696,664,632,600, 568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,740,708, 676,644,612,580,548,1019,974,942,1006,993,746,714,682,650,618,586,554,980, 948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,758,726,694, 662,630,598,566,1005,992,960,979,764,732,700,668,636,604,572,1011,998,966, 985,738,706,674,642,610,578,1017,972,940,1004,991,744,712,680,648,616,584, 552,1023,978,946,1010,965,718,654,590,952,1016,756,692,628,564,1003,990,977, 730,666,602] [views:debug,2014-08-19T16:54:03.905,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/965. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:03.905,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",965,replica,0} [ns_server:debug,2014-08-19T16:54:03.978,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 950. Nacking mccouch update. [views:debug,2014-08-19T16:54:03.978,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/950. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:03.978,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",950,replica,0} [ns_server:debug,2014-08-19T16:54:03.978,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,950,1014,988,972,956,1020,1004,1023, 994,978,962,1010] [views:debug,2014-08-19T16:54:04.062,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/950. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:04.062,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",950,replica,0} [ns_server:debug,2014-08-19T16:54:04.129,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 963. Nacking mccouch update. [views:debug,2014-08-19T16:54:04.129,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/963. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:04.129,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",963,replica,0} [ns_server:debug,2014-08-19T16:54:04.129,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,762,698,634,570,1009,996, 964,983,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678,646,614, 582,550,1021,976,944,1008,995,963,748,716,684,652,620,588,556,982,950,1014, 969,754,722,690,658,626,594,562,1001,988,956,1020,975,760,728,696,664,632, 600,568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,740, 708,676,644,612,580,548,1019,974,942,1006,993,746,714,682,650,618,586,554, 980,948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,758,726, 694,662,630,598,566,1005,992,960,979,764,732,700,668,636,604,572,1011,998, 966,985,738,706,674,642,610,578,1017,972,940,1004,991,744,712,680,648,616, 584,552,1023,978,946,1010,965,718,654,590,952,1016,756,692,628,564,1003,990, 977,730,666,602] [views:debug,2014-08-19T16:54:04.213,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/963. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:04.213,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",963,replica,0} [ns_server:debug,2014-08-19T16:54:04.296,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 948. Nacking mccouch update. [views:debug,2014-08-19T16:54:04.296,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/948. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:04.296,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",948,replica,0} [ns_server:debug,2014-08-19T16:54:04.297,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,948,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,950,1014,988,972,956,1020,1004,1023, 994,978,962,1010] [views:debug,2014-08-19T16:54:04.380,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/948. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:04.381,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",948,replica,0} [ns_server:debug,2014-08-19T16:54:04.447,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 961. Nacking mccouch update. [views:debug,2014-08-19T16:54:04.447,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/961. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:04.447,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",961,replica,0} [ns_server:debug,2014-08-19T16:54:04.448,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,762,698,634,570,1009,996, 964,983,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678,646,614, 582,550,1021,976,944,1008,995,963,748,716,684,652,620,588,556,982,950,1014, 969,754,722,690,658,626,594,562,1001,988,956,1020,975,760,728,696,664,632, 600,568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,740, 708,676,644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618,586, 554,980,948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,758, 726,694,662,630,598,566,1005,992,960,979,764,732,700,668,636,604,572,1011, 998,966,985,738,706,674,642,610,578,1017,972,940,1004,991,744,712,680,648, 616,584,552,1023,978,946,1010,965,718,654,590,952,1016,756,692,628,564,1003, 990,977,730,666,602] [views:debug,2014-08-19T16:54:04.514,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/961. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:04.515,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",961,replica,0} [ns_server:debug,2014-08-19T16:54:04.596,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 946. Nacking mccouch update. [views:debug,2014-08-19T16:54:04.596,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/946. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:04.597,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",946,replica,0} [ns_server:debug,2014-08-19T16:54:04.597,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,948,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,950,1014,988,972,956,1020,1004,1023, 994,978,962,946,1010] [views:debug,2014-08-19T16:54:04.672,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/946. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:04.673,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",946,replica,0} [ns_server:debug,2014-08-19T16:54:04.722,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 959. Nacking mccouch update. [views:debug,2014-08-19T16:54:04.722,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/959. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:04.723,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",959,replica,0} [ns_server:debug,2014-08-19T16:54:04.723,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,762,698,634,570,1009,996, 983,736,704,672,640,608,576,1015,970,938,1002,989,742,710,678,646,614,582, 550,1021,976,944,1008,995,963,748,716,684,652,620,588,556,982,950,1014,969, 754,722,690,658,626,594,562,1001,988,956,1020,975,760,728,696,664,632,600, 568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,740,708, 676,644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618,586,554, 980,948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,758,726, 694,662,630,598,566,1005,992,960,979,764,732,700,668,636,604,572,1011,998, 966,985,738,706,674,642,610,578,1017,972,940,1004,991,959,744,712,680,648, 616,584,552,1023,978,946,1010,965,718,654,590,952,1016,756,692,628,564,1003, 990,977,730,666,602,964] [views:debug,2014-08-19T16:54:04.790,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/959. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:04.790,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",959,replica,0} [ns_server:debug,2014-08-19T16:54:04.865,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 944. Nacking mccouch update. [views:debug,2014-08-19T16:54:04.865,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/944. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:04.865,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",944,replica,0} [ns_server:debug,2014-08-19T16:54:04.865,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,948,1012,986,970,954, 1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,1020,1004, 1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:04.941,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/944. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:04.941,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",944,replica,0} [ns_server:debug,2014-08-19T16:54:05.016,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 957. Nacking mccouch update. [views:debug,2014-08-19T16:54:05.016,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/957. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:05.016,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",957,replica,0} [ns_server:debug,2014-08-19T16:54:05.016,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,762,698,634,570,1009,996, 983,736,704,672,640,608,576,1015,970,938,1002,989,957,742,710,678,646,614, 582,550,1021,976,944,1008,995,963,748,716,684,652,620,588,556,982,950,1014, 969,754,722,690,658,626,594,562,1001,988,956,1020,975,760,728,696,664,632, 600,568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,740, 708,676,644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618,586, 554,980,948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,758, 726,694,662,630,598,566,1005,992,960,979,764,732,700,668,636,604,572,1011, 998,966,985,738,706,674,642,610,578,1017,972,940,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,1010,965,718,654,590,952,1016,756,692,628,564, 1003,990,977,730,666,602,964] [views:debug,2014-08-19T16:54:05.116,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/957. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:05.117,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",957,replica,0} [ns_server:debug,2014-08-19T16:54:05.183,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 942. Nacking mccouch update. [views:debug,2014-08-19T16:54:05.183,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/942. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:05.183,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",942,replica,0} [ns_server:debug,2014-08-19T16:54:05.183,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986,970, 954,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,1020, 1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:05.273,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/942. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:05.274,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",942,replica,0} [ns_server:debug,2014-08-19T16:54:05.374,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 955. Nacking mccouch update. [views:debug,2014-08-19T16:54:05.374,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/955. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:05.374,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",955,replica,0} [ns_server:debug,2014-08-19T16:54:05.374,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,762,698,634,570,1009,996, 983,736,704,672,640,608,576,1015,970,938,1002,989,957,742,710,678,646,614, 582,550,1021,976,944,1008,995,963,748,716,684,652,620,588,556,982,950,1014, 969,754,722,690,658,626,594,562,1001,988,956,1020,975,760,728,696,664,632, 600,568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,955, 740,708,676,644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618, 586,554,980,948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973, 758,726,694,662,630,598,566,1005,992,960,979,764,732,700,668,636,604,572, 1011,998,966,985,738,706,674,642,610,578,1017,972,940,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,1010,965,718,654,590,952,1016,756,692,628, 564,1003,990,977,730,666,602,964] [views:debug,2014-08-19T16:54:05.441,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/955. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:05.441,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",955,replica,0} [ns_server:debug,2014-08-19T16:54:05.534,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 940. Nacking mccouch update. [views:debug,2014-08-19T16:54:05.534,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/940. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:05.534,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",940,replica,0} [ns_server:debug,2014-08-19T16:54:05.535,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986,970, 954,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,940,1020, 1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:05.600,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/940. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:05.600,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",940,replica,0} [ns_server:debug,2014-08-19T16:54:05.717,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 953. Nacking mccouch update. [views:debug,2014-08-19T16:54:05.717,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/953. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:05.717,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",953,replica,0} [ns_server:debug,2014-08-19T16:54:05.717,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,762,698,634,570,1009,996, 983,736,704,672,640,608,576,1015,970,938,1002,989,957,742,710,678,646,614, 582,550,1021,976,944,1008,995,963,748,716,684,652,620,588,556,982,950,1014, 969,754,722,690,658,626,594,562,1001,988,956,1020,975,760,728,696,664,632, 600,568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987,955, 740,708,676,644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618, 586,554,980,948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973, 758,726,694,662,630,598,566,1005,992,960,979,764,732,700,668,636,604,572, 1011,998,966,985,953,738,706,674,642,610,578,1017,972,940,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,1010,965,718,654,590,952,1016,756,692, 628,564,1003,990,977,730,666,602,964] [views:debug,2014-08-19T16:54:05.801,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/953. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:05.801,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",953,replica,0} [ns_server:debug,2014-08-19T16:54:05.909,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 938. Nacking mccouch update. [views:debug,2014-08-19T16:54:05.909,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/938. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:05.910,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",938,replica,0} [ns_server:debug,2014-08-19T16:54:05.910,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986,970, 954,938,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,940, 1020,1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:05.960,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/938. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:05.960,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",938,replica,0} [ns_server:debug,2014-08-19T16:54:06.092,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 951. Nacking mccouch update. [views:debug,2014-08-19T16:54:06.092,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/951. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:06.092,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",951,replica,0} [ns_server:debug,2014-08-19T16:54:06.093,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,762,698,634,570,1009,996, 983,951,736,704,672,640,608,576,1015,970,938,1002,989,957,742,710,678,646, 614,582,550,1021,976,944,1008,995,963,748,716,684,652,620,588,556,982,950, 1014,969,754,722,690,658,626,594,562,1001,988,956,1020,975,760,728,696,664, 632,600,568,1007,994,962,981,766,734,702,670,638,606,574,1013,968,1000,987, 955,740,708,676,644,612,580,548,1019,974,942,1006,993,961,746,714,682,650, 618,586,554,980,948,1012,999,967,752,720,688,656,624,592,560,986,954,1018, 973,758,726,694,662,630,598,566,1005,992,960,979,764,732,700,668,636,604,572, 1011,998,966,985,953,738,706,674,642,610,578,1017,972,940,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,1010,965,718,654,590,952,1016,756,692, 628,564,1003,990,977,730,666,602,964] [views:debug,2014-08-19T16:54:06.159,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/951. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:06.159,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",951,replica,0} [ns_server:debug,2014-08-19T16:54:06.273,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 766. Nacking mccouch update. [views:debug,2014-08-19T16:54:06.273,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/766. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:06.274,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",766,active,0} [ns_server:debug,2014-08-19T16:54:06.274,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986, 970,954,938,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956, 940,1020,1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:06.360,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/766. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:06.360,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",766,active,0} [ns_server:debug,2014-08-19T16:54:06.460,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 949. Nacking mccouch update. [views:debug,2014-08-19T16:54:06.460,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/949. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:06.460,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",949,replica,0} [ns_server:debug,2014-08-19T16:54:06.461,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,762,698,634,570,1009,996, 983,736,672,608,970,938,1002,989,957,742,710,678,646,614,582,550,1021,976, 944,1008,995,963,748,716,684,652,620,588,556,982,950,1014,969,754,722,690, 658,626,594,562,1001,988,956,1020,975,760,728,696,664,632,600,568,1007,994, 962,981,949,766,734,702,670,638,606,574,1013,968,1000,987,955,740,708,676, 644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618,586,554,980, 948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,758,726,694, 662,630,598,566,1005,992,960,979,764,732,700,668,636,604,572,1011,998,966, 985,953,738,706,674,642,610,578,1017,972,940,1004,991,959,744,712,680,648, 616,584,552,1023,978,946,1010,965,718,654,590,952,1016,756,692,628,564,1003, 990,977,730,666,602,964,951,704,640,576,1015] [views:debug,2014-08-19T16:54:06.554,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/949. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:06.554,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",949,replica,0} [ns_server:debug,2014-08-19T16:54:06.637,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 764. Nacking mccouch update. [views:debug,2014-08-19T16:54:06.637,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/764. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:06.637,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",764,active,0} [ns_server:debug,2014-08-19T16:54:06.638,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986, 970,954,938,1018,1002,992,976,960,944,1008,764,998,982,966,950,1014,988,972, 956,940,1020,1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:06.705,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/764. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:06.705,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",764,active,0} [ns_server:debug,2014-08-19T16:54:06.801,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 947. Nacking mccouch update. [views:debug,2014-08-19T16:54:06.802,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/947. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:06.802,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",947,replica,0} [ns_server:debug,2014-08-19T16:54:06.802,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,762,698,634,570,1009,996, 983,736,672,608,970,938,1002,989,957,742,710,678,646,614,582,550,1021,976, 944,1008,995,963,748,716,684,652,620,588,556,982,950,1014,969,754,722,690, 658,626,594,562,1001,988,956,1020,975,760,728,696,664,632,600,568,1007,994, 962,981,949,766,734,702,670,638,606,574,1013,968,1000,987,955,740,708,676, 644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618,586,554,980, 948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,758,726,694, 662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572,1011,998, 966,985,953,738,706,674,642,610,578,1017,972,940,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,1010,965,718,654,590,952,1016,756,692,628,564, 1003,990,977,730,666,602,964,951,704,640,576,1015] [views:debug,2014-08-19T16:54:06.877,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/947. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:06.878,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",947,replica,0} [ns_server:debug,2014-08-19T16:54:07.010,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 762. Nacking mccouch update. [views:debug,2014-08-19T16:54:07.010,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/762. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:07.010,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",762,active,0} [ns_server:debug,2014-08-19T16:54:07.010,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,762,996,980,964,948,1012, 986,970,954,938,1018,1002,992,976,960,944,1008,764,998,982,966,950,1014,988, 972,956,940,1020,1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:07.079,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/762. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:07.079,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",762,active,0} [ns_server:debug,2014-08-19T16:54:07.180,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 945. Nacking mccouch update. [views:debug,2014-08-19T16:54:07.180,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/945. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:07.180,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",945,replica,0} [ns_server:debug,2014-08-19T16:54:07.180,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,983,736,672,608,970,938,1002,989,957,742,710,678,646,614,582,550,1021, 976,944,1008,995,963,748,716,684,652,620,588,556,982,950,1014,969,754,722, 690,658,626,594,562,1001,988,956,1020,975,760,728,696,664,632,600,568,1007, 994,962,981,949,766,734,702,670,638,606,574,1013,968,1000,987,955,740,708, 676,644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618,586,554, 980,948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,758,726, 694,662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572,1011, 998,966,985,953,738,706,674,642,610,578,1017,972,940,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,1010,965,718,654,590,952,1016,756,692,628, 564,1003,990,977,730,666,602,964,951,704,640,576,1015] [views:debug,2014-08-19T16:54:07.230,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/945. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:07.230,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",945,replica,0} [ns_server:debug,2014-08-19T16:54:07.297,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 760. Nacking mccouch update. [views:debug,2014-08-19T16:54:07.297,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/760. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:07.297,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",760,active,0} [ns_server:debug,2014-08-19T16:54:07.297,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,762,996,980,964,948,1012, 986,970,954,938,1018,1002,992,976,960,944,1008,764,998,982,966,950,1014,988, 972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:07.347,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/760. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:07.348,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",760,active,0} [ns_server:debug,2014-08-19T16:54:07.431,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 943. Nacking mccouch update. [views:debug,2014-08-19T16:54:07.431,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/943. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:07.431,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",943,replica,0} [ns_server:debug,2014-08-19T16:54:07.432,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,983,736,672,608,970,938,1002,989,957,742,710,678,646,614,582,550,1021, 976,944,1008,995,963,748,716,684,652,620,588,556,982,950,1014,969,754,722, 690,658,626,594,562,1001,988,956,1020,975,943,760,728,696,664,632,600,568, 1007,994,962,981,949,766,734,702,670,638,606,574,1013,968,1000,987,955,740, 708,676,644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618,586, 554,980,948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,758, 726,694,662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572, 1011,998,966,985,953,738,706,674,642,610,578,1017,972,940,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,1010,965,718,654,590,952,1016,756,692, 628,564,1003,990,977,730,666,602,964,951,704,640,576,1015] [views:debug,2014-08-19T16:54:07.497,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/943. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:07.497,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",943,replica,0} [ns_server:debug,2014-08-19T16:54:07.564,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 758. Nacking mccouch update. [views:debug,2014-08-19T16:54:07.564,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/758. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:07.564,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",758,active,0} [ns_server:debug,2014-08-19T16:54:07.564,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,762,996,980,964,948,1012, 986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982,966,950,1014, 988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:07.614,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/758. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:07.614,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",758,active,0} [ns_server:debug,2014-08-19T16:54:07.714,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 941. Nacking mccouch update. [views:debug,2014-08-19T16:54:07.714,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/941. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:07.714,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",941,replica,0} [ns_server:debug,2014-08-19T16:54:07.715,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,983,736,672,608,970,938,1002,989,957,742,710,678,646,614,582,550,1021, 976,944,1008,995,963,748,716,684,652,620,588,556,982,950,1014,969,754,722, 690,658,626,594,562,1001,988,956,1020,975,943,760,728,696,664,632,600,568, 1007,994,962,981,949,766,734,702,670,638,606,574,1013,968,1000,987,955,740, 708,676,644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618,586, 554,980,948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941, 758,726,694,662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572, 1011,998,966,985,953,738,706,674,642,610,578,1017,972,940,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,1010,965,718,654,590,952,1016,756,692, 628,564,1003,990,977,730,666,602,964,951,704,640,576,1015] [views:debug,2014-08-19T16:54:07.815,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/941. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:07.815,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",941,replica,0} [ns_server:debug,2014-08-19T16:54:07.890,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 756. Nacking mccouch update. [views:debug,2014-08-19T16:54:07.890,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/756. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:07.890,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",756,active,0} [ns_server:debug,2014-08-19T16:54:07.890,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964,948, 1012,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982,966,950, 1014,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:08.016,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/756. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:08.016,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",756,active,0} [ns_server:debug,2014-08-19T16:54:08.149,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 939. Nacking mccouch update. [views:debug,2014-08-19T16:54:08.149,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/939. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:08.149,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",939,replica,0} [ns_server:debug,2014-08-19T16:54:08.150,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,983,736,672,608,970,989,957,742,710,678,646,614,582,550,1021,976,944, 1008,995,963,748,716,684,652,620,588,556,982,950,1014,969,754,722,690,658, 626,594,562,1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007,994, 962,981,949,766,734,702,670,638,606,574,1013,968,1000,987,955,740,708,676, 644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618,586,554,980, 948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941,758,726, 694,662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572,1011, 998,966,985,953,738,706,674,642,610,578,1017,972,940,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,1010,965,718,654,590,952,1016,939,756,692, 628,564,1003,990,977,730,666,602,964,951,704,640,576,1015,938,1002] [views:debug,2014-08-19T16:54:08.258,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/939. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:08.258,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",939,replica,0} [ns_server:debug,2014-08-19T16:54:08.331,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 754. Nacking mccouch update. [views:debug,2014-08-19T16:54:08.331,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/754. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:08.332,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",754,active,0} [ns_server:debug,2014-08-19T16:54:08.332,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964,948, 1012,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982,966,950, 1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:08.382,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/754. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:08.382,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",754,active,0} [ns_server:debug,2014-08-19T16:54:08.457,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 767. Nacking mccouch update. [views:debug,2014-08-19T16:54:08.458,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/767. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:08.458,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",767,active,0} [ns_server:debug,2014-08-19T16:54:08.458,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,983,736,672,608,970,989,957,742,710,678,646,614,582,550,1021,976,944, 1008,995,963,748,716,684,652,620,588,556,982,950,767,1014,969,754,722,690, 658,626,594,562,1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007, 994,962,981,949,766,734,702,670,638,606,574,1013,968,1000,987,955,740,708, 676,644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618,586,554, 980,948,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941,758, 726,694,662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572, 1011,998,966,985,953,738,706,674,642,610,578,1017,972,940,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,1010,965,718,654,590,952,1016,939,756, 692,628,564,1003,990,977,730,666,602,964,951,704,640,576,1015,938,1002] [views:debug,2014-08-19T16:54:08.508,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/767. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:08.508,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",767,active,0} [ns_server:debug,2014-08-19T16:54:08.558,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 752. Nacking mccouch update. [views:debug,2014-08-19T16:54:08.558,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/752. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:08.558,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",752,active,0} [ns_server:debug,2014-08-19T16:54:08.559,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964,948, 1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982,966, 950,1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:08.609,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/752. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:08.609,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",752,active,0} [ns_server:debug,2014-08-19T16:54:08.667,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 765. Nacking mccouch update. [views:debug,2014-08-19T16:54:08.667,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/765. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:08.668,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",765,active,0} [ns_server:debug,2014-08-19T16:54:08.668,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,983,736,672,608,970,989,957,742,710,678,646,614,582,550,1021,976,944, 1008,995,963,748,716,684,652,620,588,556,982,950,767,1014,969,754,722,690, 658,626,594,562,1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007, 994,962,981,949,766,734,702,670,638,606,574,1013,968,1000,987,955,740,708, 676,644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618,586,554, 980,948,765,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941, 758,726,694,662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572, 1011,998,966,985,953,738,706,674,642,610,578,1017,972,940,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,1010,965,718,654,590,952,1016,939,756, 692,628,564,1003,990,977,730,666,602,964,951,704,640,576,1015,938,1002] [views:debug,2014-08-19T16:54:08.743,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/765. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:08.743,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",765,active,0} [ns_server:debug,2014-08-19T16:54:08.843,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 750. Nacking mccouch update. [views:debug,2014-08-19T16:54:08.843,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/750. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:08.844,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",750,active,0} [ns_server:debug,2014-08-19T16:54:08.844,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964, 948,1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982, 966,950,1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:08.917,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/750. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:08.917,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",750,active,0} [ns_server:debug,2014-08-19T16:54:09.057,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 763. Nacking mccouch update. [views:debug,2014-08-19T16:54:09.057,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/763. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:09.057,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",763,active,0} [ns_server:debug,2014-08-19T16:54:09.058,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,983,736,672,608,970,989,957,742,710,678,646,614,582,550,1021,976,944, 1008,995,963,748,716,684,652,620,588,556,982,950,767,1014,969,754,722,690, 658,626,594,562,1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007, 994,962,981,949,766,734,702,670,638,606,574,1013,968,1000,987,955,740,708, 676,644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618,586,554, 980,948,765,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941, 758,726,694,662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572, 1011,998,966,985,953,738,706,674,642,610,578,1017,972,940,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,763,1010,965,718,654,590,952,1016,939, 756,692,628,564,1003,990,977,730,666,602,964,951,704,640,576,1015,938,1002] [views:debug,2014-08-19T16:54:09.143,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/763. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:09.143,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",763,active,0} [ns_server:debug,2014-08-19T16:54:09.243,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 748. Nacking mccouch update. [views:debug,2014-08-19T16:54:09.243,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/748. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:09.243,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",748,active,0} [ns_server:debug,2014-08-19T16:54:09.243,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964, 948,1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,748,998, 982,966,950,1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:09.344,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/748. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:09.344,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",748,active,0} [ns_server:debug,2014-08-19T16:54:09.469,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 761. Nacking mccouch update. [views:debug,2014-08-19T16:54:09.469,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/761. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:09.469,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",761,active,0} [ns_server:debug,2014-08-19T16:54:09.470,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,983,736,672,608,970,989,957,742,710,678,646,614,582,550,1021,976,944,761, 1008,995,963,748,716,684,652,620,588,556,982,950,767,1014,969,754,722,690, 658,626,594,562,1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007, 994,962,981,949,766,734,702,670,638,606,574,1013,968,1000,987,955,740,708, 676,644,612,580,548,1019,974,942,1006,993,961,746,714,682,650,618,586,554, 980,948,765,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941, 758,726,694,662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572, 1011,998,966,985,953,738,706,674,642,610,578,1017,972,940,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,763,1010,965,718,654,590,952,1016,939, 756,692,628,564,1003,990,977,730,666,602,964,951,704,640,576,1015,938,1002] [views:debug,2014-08-19T16:54:09.544,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/761. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:09.545,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",761,active,0} [ns_server:debug,2014-08-19T16:54:09.603,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 746. Nacking mccouch update. [views:debug,2014-08-19T16:54:09.603,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/746. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:09.603,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",746,active,0} [ns_server:debug,2014-08-19T16:54:09.603,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,746,996,980, 964,948,1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,748, 998,982,966,950,1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946, 1010] [views:debug,2014-08-19T16:54:09.722,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/746. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:09.722,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",746,active,0} [ns_server:debug,2014-08-19T16:54:09.827,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 759. Nacking mccouch update. [views:debug,2014-08-19T16:54:09.827,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/759. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:09.828,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",759,active,0} [ns_server:debug,2014-08-19T16:54:09.828,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,983,736,672,608,970,957,710,646,582,1021,976,944,761,1008,995,963,748, 716,684,652,620,588,556,982,950,767,1014,969,754,722,690,658,626,594,562, 1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007,994,962,981,949, 766,734,702,670,638,606,574,1013,968,1000,987,955,740,708,676,644,612,580, 548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980,948,765, 1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941,758,726,694, 662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572,1011,998, 966,985,953,738,706,674,642,610,578,1017,972,940,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,1010,965,718,654,590,952,1016,939,756,692, 628,564,1003,990,977,730,666,602,964,951,704,640,576,1015,938,1002,989,742, 678,614,550] [views:debug,2014-08-19T16:54:09.878,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/759. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:09.878,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",759,active,0} [ns_server:debug,2014-08-19T16:54:09.894,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 744. Nacking mccouch update. [views:debug,2014-08-19T16:54:09.894,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/744. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:09.895,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",744,active,0} [ns_server:debug,2014-08-19T16:54:09.895,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,746,996,980, 964,948,1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,748, 998,982,966,950,1014,754,988,972,956,940,1020,1004,760,744,1023,994,978,962, 946,1010] [ns_server:debug,2014-08-19T16:54:09.943,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:09.943,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:09.943,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:09.948,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:09.948,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:09.948,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:09.949,ns_1@10.242.238.90:ns_config_isasl_sync<0.17399.0>:ns_config_isasl_sync:writeSASLConf:143]Writing isasl passwd file: "/opt/couchbase/var/lib/couchbase/isasl.pw" [ns_server:debug,2014-08-19T16:54:09.950,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"maps_1_8_metahash", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"dfbe82706d975a8e74781701767f7843">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,104857600}, {auth_type,none}, {moxi_port,11221}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"maps_1_8_tiles", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"e28c79b4e936fc1ba8f8f3d60e6c45c8">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,484442112}, {auth_type,none}, {moxi_port,11222}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, [{map,[]}, {fastForwardMap,[]}, {uuid,<<"c3d79a09992fb9cb624d4749117de9cd">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,80530636800}, {auth_type,none}, {moxi_port,11212}, {autocompaction,false}, 
{purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,[]}]]}] [ns_server:debug,2014-08-19T16:54:09.954,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:09.954,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:09.954,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:09.954,ns_1@10.242.238.90:ns_bucket_worker<0.17558.0>:ns_bucket_sup:update_childs:84]Starting new child: {{per_bucket_sup,"tiles"}, {single_bucket_sup,start_link,["tiles"]}, permanent,infinity,supervisor, [single_bucket_sup]} [error_logger:info,2014-08-19T16:54:09.955,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.6182.1>}, {name,{per_bucket_sup,"tiles"}}, {mfargs,{single_bucket_sup,start_link,["tiles"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:54:09.955,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:09.955,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:09.955,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:09.956,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"maps_1_8_metahash", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"dfbe82706d975a8e74781701767f7843">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,104857600}, {auth_type,none}, {moxi_port,11221}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"maps_1_8_tiles", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"e28c79b4e936fc1ba8f8f3d60e6c45c8">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,484442112}, {auth_type,none}, {moxi_port,11222}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"tiles", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"c3d79a09992fb9cb624d4749117de9cd">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,80530636800}, {auth_type,none}, {moxi_port,11212}, {autocompaction,false}, {purge_interval,undefined}, 
{flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}]}]}] [user:info,2014-08-19T16:54:10.007,ns_1@10.242.238.90:<0.17397.0>:ns_log:crash_consumption_loop:64]Port server moxi on node 'babysitter_of_ns_1@127.0.0.1' exited with status 0. Restarting. Messages: 2014-08-19 16:53:51: (cproxy_config.c.315) env: MOXI_SASL_PLAIN_USR (13) 2014-08-19 16:53:51: (cproxy_config.c.324) env: MOXI_SASL_PLAIN_PWD (12) EOL on stdin. Exiting [ns_server:debug,2014-08-19T16:54:10.008,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:init:228]Usable vbuckets: [] [ns_server:debug,2014-08-19T16:54:10.008,ns_1@10.242.238.90:ns_memcached-tiles<0.6205.1>:ns_memcached:init:144]Starting ns_memcached [ns_server:debug,2014-08-19T16:54:10.008,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [error_logger:info,2014-08-19T16:54:10.008,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-tiles'} started: [{pid,<0.6184.1>}, {name,{capi_set_view_manager,"tiles"}}, {mfargs,{capi_set_view_manager,start_link,["tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [views:debug,2014-08-19T16:54:10.008,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/744. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:10.009,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",744,active,0} [ns_server:debug,2014-08-19T16:54:10.011,ns_1@10.242.238.90:<0.6206.1>:ns_memcached:run_connect_phase:167]Started 'connecting' phase of ns_memcached-tiles. Parent is <0.6205.1> [error_logger:info,2014-08-19T16:54:10.011,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-tiles'} started: [{pid,<0.6205.1>}, {name,{ns_memcached,"tiles"}}, {mfargs,{ns_memcached,start_link,["tiles"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2014-08-19T16:54:10.011,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-tiles'} started: [{pid,<0.6207.1>}, {name,{tap_replication_manager,"tiles"}}, {mfargs,{tap_replication_manager,start_link,["tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:54:10.012,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-tiles'} started: [{pid,<0.6210.1>}, {name,{ns_vbm_new_sup,"tiles"}}, {mfargs,{ns_vbm_new_sup,start_link,["tiles"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2014-08-19T16:54:10.012,ns_1@10.242.238.90:janitor_agent-tiles<0.6213.1>:janitor_agent:read_flush_counter:936]Loading flushseq failed: {error,enoent}. Assuming it's equal to global config. 
[error_logger:info,2014-08-19T16:54:10.012,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-tiles'} started: [{pid,<0.6212.1>}, {name,{ns_vbm_sup,"tiles"}}, {mfargs,{ns_vbm_sup,start_link,["tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [ns_server:info,2014-08-19T16:54:10.012,ns_1@10.242.238.90:janitor_agent-tiles<0.6213.1>:janitor_agent:read_flush_counter_from_config:943]Initialized flushseq 0 from bucket config [error_logger:info,2014-08-19T16:54:10.012,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-tiles'} started: [{pid,<0.6213.1>}, {name,{janitor_agent,"tiles"}}, {mfargs,{janitor_agent,start_link,["tiles"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:54:10.012,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-tiles'} started: [{pid,<0.6216.1>}, {name,{couch_stats_reader,"tiles"}}, {mfargs,{couch_stats_reader,start_link,["tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:54:10.012,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-tiles'} started: [{pid,<0.6217.1>}, {name,{stats_collector,"tiles"}}, {mfargs,{stats_collector,start_link,["tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:54:10.013,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-tiles'} started: [{pid,<0.6219.1>}, {name,{stats_archiver,"tiles"}}, {mfargs,{stats_archiver,start_link,["tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:54:10.013,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-tiles'} started: [{pid,<0.6221.1>}, {name,{stats_reader,"tiles"}}, {mfargs,{stats_reader,start_link,["tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:54:10.013,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-tiles'} started: [{pid,<0.6222.1>}, {name,{failover_safeness_level,"tiles"}}, {mfargs,{failover_safeness_level,start_link,["tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:54:10.014,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-tiles'} started: [{pid,<0.6223.1>}, {name,{terse_bucket_info_uploader,"tiles"}}, {mfargs, {terse_bucket_info_uploader,start_link,["tiles"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[ns_server:debug,2014-08-19T16:54:10.017,ns_1@10.242.238.90:<0.17535.0>:mc_tcp_listener:accept_loop:31]Got new connection [ns_server:debug,2014-08-19T16:54:10.018,ns_1@10.242.238.90:<0.17535.0>:mc_tcp_listener:accept_loop:33]Passed connection to mc_conn_sup: <0.6225.1> [ns_server:info,2014-08-19T16:54:10.020,ns_1@10.242.238.90:ns_memcached-tiles<0.6205.1>:ns_memcached:ensure_bucket:1178]Created bucket "tiles" with config string "ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;max_size=80530636800;tap_keepalive=300;dbname=/var/lib/pgsql/tiles;allow_data_loss_during_shutdown=true;backend=couchdb;couch_bucket=tiles;couch_port=11213;max_vbuckets=1024;alog_path=/var/lib/pgsql/tiles/access.log;data_traffic_enabled=false;max_num_workers=3;uuid=c3d79a09992fb9cb624d4749117de9cd;vb0=false;waitforwarmup=false;failpartialwarmup=false;" [ns_server:debug,2014-08-19T16:54:10.020,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:54:10.020,ns_1@10.242.238.90:ns_memcached-tiles<0.6205.1>:ns_memcached:handle_cast:609]Main ns_memcached connection established: {ok,#Port<0.23471>} [ns_server:debug,2014-08-19T16:54:10.022,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:10.022,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:10.022,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:10.022,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [user:info,2014-08-19T16:54:10.022,ns_1@10.242.238.90:ns_memcached-tiles<0.6205.1>:ns_memcached:handle_cast:632]Bucket "tiles" loaded on node 'ns_1@10.242.238.90' in 0 seconds. [ns_server:debug,2014-08-19T16:54:10.050,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:10.056,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:10.114,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 757. Nacking mccouch update. [views:debug,2014-08-19T16:54:10.114,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/757. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:10.114,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",757,active,0} [ns_server:debug,2014-08-19T16:54:10.115,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,983,736,672,608,970,957,710,646,582,1021,976,944,761,1008,995,963,748, 716,684,652,620,588,556,982,950,767,1014,969,754,722,690,658,626,594,562, 1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007,994,962,981,949, 766,734,702,670,638,606,574,1013,968,1000,987,955,740,708,676,644,612,580, 548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980,948,765, 1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941,758,726,694, 662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572,1011,998, 966,985,953,738,706,674,642,610,578,1017,972,940,757,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,763,1010,965,718,654,590,952,1016,939,756, 692,628,564,1003,990,977,730,666,602,964,951,704,640,576,1015,938,1002,989, 742,678,614,550] [views:debug,2014-08-19T16:54:10.190,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/757. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:10.190,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",757,active,0} [ns_server:debug,2014-08-19T16:54:10.206,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 742. Nacking mccouch update. [views:debug,2014-08-19T16:54:10.206,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/742. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:10.206,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",742,active,0} [ns_server:debug,2014-08-19T16:54:10.206,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,746,996,980, 964,948,1012,752,986,970,954,938,1018,1002,758,742,992,976,960,944,1008,764, 748,998,982,966,950,1014,754,988,972,956,940,1020,1004,760,744,1023,994,978, 962,946,1010] [ns_server:debug,2014-08-19T16:54:10.214,ns_1@10.242.238.90:ns_heart_slow_status_updater<0.17440.0>:ns_heart:current_status_slow:261]Ignoring failure to get stats for bucket: "tiles": {error,no_samples} [views:debug,2014-08-19T16:54:10.289,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/742. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:10.289,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",742,active,0} [ns_server:debug,2014-08-19T16:54:10.439,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 755. Nacking mccouch update. [views:debug,2014-08-19T16:54:10.439,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/755. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:10.439,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",755,active,0} [ns_server:debug,2014-08-19T16:54:10.439,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,983,736,672,608,970,957,710,646,582,1021,976,944,761,1008,995,963,748, 716,684,652,620,588,556,982,950,767,1014,969,754,722,690,658,626,594,562, 1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007,994,962,981,949, 766,734,702,670,638,606,574,1013,968,1000,987,955,740,708,676,644,612,580, 548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980,948,765, 1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941,758,726,694, 662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572,1011,998, 966,985,953,738,706,674,642,610,578,1017,972,940,757,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,763,1010,965,718,654,590,952,1016,939,756, 692,628,564,1003,990,977,730,666,602,964,951,704,640,576,1015,938,755,1002, 989,742,678,614,550] [ns_server:debug,2014-08-19T16:54:10.539,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 740. Nacking mccouch update. [views:debug,2014-08-19T16:54:10.539,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/740. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:10.539,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",740,active,0} [ns_server:debug,2014-08-19T16:54:10.539,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746,996, 980,964,948,1012,752,986,970,954,938,1018,1002,758,742,992,976,960,944,1008, 764,748,998,982,966,950,1014,754,988,972,956,940,1020,1004,760,744,1023,994, 978,962,946,1010] [views:debug,2014-08-19T16:54:10.540,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/755. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:10.540,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",755,active,0} [views:debug,2014-08-19T16:54:10.665,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/740. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:10.665,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",740,active,0} [ns_server:debug,2014-08-19T16:54:10.840,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 753. Nacking mccouch update. [views:debug,2014-08-19T16:54:10.840,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/753. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:10.840,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",753,active,0} [ns_server:debug,2014-08-19T16:54:10.841,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,983,736,672,608,970,957,710,646,582,1021,976,944,761,1008,995,963,748, 716,684,652,620,588,556,982,950,767,1014,969,754,722,690,658,626,594,562, 1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007,994,962,981,949, 766,734,702,670,638,606,574,1013,968,753,1000,987,955,740,708,676,644,612, 580,548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980,948, 765,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941,758,726, 694,662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572,1011, 998,966,985,953,738,706,674,642,610,578,1017,972,940,757,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,763,1010,965,718,654,590,952,1016,939, 756,692,628,564,1003,990,977,730,666,602,964,951,704,640,576,1015,938,755, 1002,989,742,678,614,550] [ns_server:debug,2014-08-19T16:54:10.899,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 738. Nacking mccouch update. [views:debug,2014-08-19T16:54:10.899,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/738. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:10.899,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",738,active,0} [ns_server:debug,2014-08-19T16:54:10.899,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746,996, 980,964,948,1012,752,986,970,954,938,1018,1002,758,742,992,976,960,944,1008, 764,748,998,982,966,950,1014,754,738,988,972,956,940,1020,1004,760,744,1023, 994,978,962,946,1010] [views:debug,2014-08-19T16:54:10.958,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/753. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:10.958,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",753,active,0} [ns_server:debug,2014-08-19T16:54:11.013,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:11.013,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:11.013,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:11.013,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:11.016,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:11.016,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:11.016,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:54:11.016,ns_1@10.242.238.90:capi_set_view_manager-default<0.18761.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:54:11.018,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1023 state to replica [ns_server:info,2014-08-19T16:54:11.018,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1022 state to replica [ns_server:info,2014-08-19T16:54:11.021,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1021 state to replica [ns_server:info,2014-08-19T16:54:11.022,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1020 state to replica [ns_server:info,2014-08-19T16:54:11.022,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1019 state to replica [ns_server:info,2014-08-19T16:54:11.022,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1018 state to replica [ns_server:debug,2014-08-19T16:54:11.022,ns_1@10.242.238.90:ns_config_log<0.17158.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"maps_1_8_metahash", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"dfbe82706d975a8e74781701767f7843">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,104857600}, {auth_type,none}, {moxi_port,11221}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"maps_1_8_tiles", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"e28c79b4e936fc1ba8f8f3d60e6c45c8">>}, {num_replicas,1}, 
{replica_index,false}, {ram_quota,484442112}, {auth_type,none}, {moxi_port,11222}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}, {"tiles", [{map,[{0,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {1,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {2,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {3,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {4,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {5,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {6,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {7,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {8,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {9,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {10,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {11,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {12,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {13,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {14,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {15,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {16,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {17,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {18,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {19,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {20,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {21,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {22,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {23,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {24,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {25,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {26,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {27,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {28,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {29,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {30,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {31,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {32,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {33,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {34,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {35,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {36,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {37,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {38,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {39,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {40,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {41,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {42,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {43,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {44,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {45,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {46,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {47,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {48,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {49,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {50,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {51,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {52,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {53,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {54,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {55,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {56,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {57,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {58,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {59,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, 
{60,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {61,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {62,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {63,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {64,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {65,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {66,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {67,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {68,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {69,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {70,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {71,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {72,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {73,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {74,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {75,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {76,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {77,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {78,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {79,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {80,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {81,[],['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {82,[],['ns_1@10.242.238.88'|...]}, {83,[],[...]}, {84,[],...}, {85,...}, {...}|...]}, {fastForwardMap,[]}, {uuid,<<"c3d79a09992fb9cb624d4749117de9cd">>}, {num_replicas,1}, {replica_index,false}, {ram_quota,80530636800}, {auth_type,none}, {moxi_port,11212}, {autocompaction,false}, {purge_interval,undefined}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:54:11.022,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1017 state to replica [ns_server:info,2014-08-19T16:54:11.024,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1016 state to replica [ns_server:info,2014-08-19T16:54:11.024,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1015 state to replica [ns_server:info,2014-08-19T16:54:11.025,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1014 state to replica [ns_server:info,2014-08-19T16:54:11.025,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1013 state to replica [ns_server:info,2014-08-19T16:54:11.025,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1012 state to replica [ns_server:info,2014-08-19T16:54:11.025,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1011 state to replica [ns_server:info,2014-08-19T16:54:11.026,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1010 state to replica [ns_server:info,2014-08-19T16:54:11.026,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1009 state to replica [ns_server:info,2014-08-19T16:54:11.026,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1008 state to replica [ns_server:info,2014-08-19T16:54:11.027,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1007 state to replica [ns_server:info,2014-08-19T16:54:11.027,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1006 state to replica [ns_server:info,2014-08-19T16:54:11.027,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1005 state to replica 
[ns_server:info,2014-08-19T16:54:11.027,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1004 state to replica [ns_server:info,2014-08-19T16:54:11.028,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1003 state to replica [ns_server:info,2014-08-19T16:54:11.028,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1002 state to replica [ns_server:info,2014-08-19T16:54:11.028,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1001 state to replica [ns_server:info,2014-08-19T16:54:11.029,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 1000 state to replica [ns_server:info,2014-08-19T16:54:11.029,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 999 state to replica [ns_server:info,2014-08-19T16:54:11.029,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 998 state to replica [ns_server:info,2014-08-19T16:54:11.030,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 997 state to replica [ns_server:info,2014-08-19T16:54:11.030,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 996 state to replica [ns_server:info,2014-08-19T16:54:11.030,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 995 state to replica [ns_server:info,2014-08-19T16:54:11.031,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 994 state to replica [ns_server:info,2014-08-19T16:54:11.031,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 993 state to replica [ns_server:info,2014-08-19T16:54:11.031,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 992 state to replica [ns_server:info,2014-08-19T16:54:11.031,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 991 state to replica [ns_server:info,2014-08-19T16:54:11.032,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 990 state to replica [ns_server:info,2014-08-19T16:54:11.032,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 989 state to replica [ns_server:info,2014-08-19T16:54:11.032,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 988 state to replica [ns_server:info,2014-08-19T16:54:11.033,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 987 state to replica [ns_server:info,2014-08-19T16:54:11.033,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 986 state to replica [ns_server:info,2014-08-19T16:54:11.033,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 985 state to replica [ns_server:info,2014-08-19T16:54:11.033,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 984 state to replica [ns_server:info,2014-08-19T16:54:11.034,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 983 state to replica [ns_server:info,2014-08-19T16:54:11.034,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 982 state to replica [ns_server:info,2014-08-19T16:54:11.034,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 981 state to replica [ns_server:info,2014-08-19T16:54:11.034,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 980 state to replica 
[ns_server:info,2014-08-19T16:54:11.035,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 979 state to replica [ns_server:info,2014-08-19T16:54:11.035,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 978 state to replica [ns_server:info,2014-08-19T16:54:11.035,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 977 state to replica [ns_server:info,2014-08-19T16:54:11.035,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 976 state to replica [ns_server:info,2014-08-19T16:54:11.036,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 975 state to replica [ns_server:info,2014-08-19T16:54:11.036,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 974 state to replica [ns_server:info,2014-08-19T16:54:11.036,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 973 state to replica [ns_server:info,2014-08-19T16:54:11.036,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 972 state to replica [ns_server:info,2014-08-19T16:54:11.037,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 971 state to replica [ns_server:info,2014-08-19T16:54:11.037,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 970 state to replica [ns_server:info,2014-08-19T16:54:11.037,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 969 state to replica [ns_server:info,2014-08-19T16:54:11.037,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 968 state to replica [ns_server:info,2014-08-19T16:54:11.038,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 967 state to replica [ns_server:info,2014-08-19T16:54:11.038,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 966 state to replica [ns_server:info,2014-08-19T16:54:11.038,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 965 state to replica [ns_server:info,2014-08-19T16:54:11.038,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 964 state to replica [ns_server:info,2014-08-19T16:54:11.039,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 963 state to replica [ns_server:info,2014-08-19T16:54:11.039,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 962 state to replica [ns_server:info,2014-08-19T16:54:11.039,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 961 state to replica [ns_server:info,2014-08-19T16:54:11.039,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 960 state to replica [ns_server:info,2014-08-19T16:54:11.040,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 959 state to replica [ns_server:info,2014-08-19T16:54:11.040,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 958 state to replica [ns_server:info,2014-08-19T16:54:11.040,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 957 state to replica [ns_server:info,2014-08-19T16:54:11.040,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 956 state to replica [ns_server:info,2014-08-19T16:54:11.041,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 955 state to replica 
[ns_server:info,2014-08-19T16:54:11.041,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 954 state to replica [ns_server:info,2014-08-19T16:54:11.041,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 953 state to replica [ns_server:info,2014-08-19T16:54:11.041,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 952 state to replica [ns_server:info,2014-08-19T16:54:11.042,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 951 state to replica [ns_server:info,2014-08-19T16:54:11.042,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 950 state to replica [ns_server:info,2014-08-19T16:54:11.042,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 949 state to replica [ns_server:info,2014-08-19T16:54:11.043,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 948 state to replica [ns_server:info,2014-08-19T16:54:11.043,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 947 state to replica [ns_server:info,2014-08-19T16:54:11.043,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 946 state to replica [ns_server:info,2014-08-19T16:54:11.043,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 945 state to replica [ns_server:info,2014-08-19T16:54:11.044,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 944 state to replica [ns_server:info,2014-08-19T16:54:11.044,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 943 state to replica [ns_server:info,2014-08-19T16:54:11.044,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 942 state to replica [ns_server:info,2014-08-19T16:54:11.044,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 941 state to replica [ns_server:info,2014-08-19T16:54:11.045,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 940 state to replica [ns_server:info,2014-08-19T16:54:11.045,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 939 state to replica [ns_server:info,2014-08-19T16:54:11.045,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 938 state to replica [ns_server:info,2014-08-19T16:54:11.045,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 767 state to active [ns_server:info,2014-08-19T16:54:11.045,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 766 state to active [ns_server:info,2014-08-19T16:54:11.046,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 765 state to active [ns_server:info,2014-08-19T16:54:11.046,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 764 state to active [ns_server:info,2014-08-19T16:54:11.046,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 763 state to active [ns_server:info,2014-08-19T16:54:11.046,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 762 state to active [ns_server:info,2014-08-19T16:54:11.047,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 761 state to active [ns_server:info,2014-08-19T16:54:11.047,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 760 state to active 
[ns_server:info,2014-08-19T16:54:11.047,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 759 state to active [ns_server:info,2014-08-19T16:54:11.047,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 758 state to active [ns_server:info,2014-08-19T16:54:11.048,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 757 state to active [ns_server:info,2014-08-19T16:54:11.048,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 756 state to active [ns_server:info,2014-08-19T16:54:11.048,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 755 state to active [ns_server:info,2014-08-19T16:54:11.048,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 754 state to active [ns_server:info,2014-08-19T16:54:11.049,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 753 state to active [ns_server:info,2014-08-19T16:54:11.049,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 752 state to active [ns_server:info,2014-08-19T16:54:11.049,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 751 state to active [ns_server:info,2014-08-19T16:54:11.049,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 750 state to active [views:debug,2014-08-19T16:54:11.050,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/738. Updated state: active (0) [ns_server:info,2014-08-19T16:54:11.050,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 749 state to active [ns_server:debug,2014-08-19T16:54:11.050,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",738,active,0} [ns_server:info,2014-08-19T16:54:11.050,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 748 state to active [ns_server:info,2014-08-19T16:54:11.050,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 747 state to active [ns_server:info,2014-08-19T16:54:11.050,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 746 state to active [ns_server:info,2014-08-19T16:54:11.051,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 745 state to active [ns_server:info,2014-08-19T16:54:11.051,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 744 state to active [ns_server:info,2014-08-19T16:54:11.051,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 743 state to active [ns_server:info,2014-08-19T16:54:11.051,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 742 state to active [ns_server:info,2014-08-19T16:54:11.052,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 741 state to active [ns_server:info,2014-08-19T16:54:11.052,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 740 state to active [ns_server:info,2014-08-19T16:54:11.052,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 739 state to active [ns_server:info,2014-08-19T16:54:11.052,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 738 state to active [ns_server:info,2014-08-19T16:54:11.052,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 737 state to 
active [ns_server:info,2014-08-19T16:54:11.053,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 736 state to active [ns_server:info,2014-08-19T16:54:11.053,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 735 state to active [ns_server:info,2014-08-19T16:54:11.053,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 734 state to active [ns_server:info,2014-08-19T16:54:11.053,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 733 state to active [ns_server:info,2014-08-19T16:54:11.054,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 732 state to active [ns_server:info,2014-08-19T16:54:11.054,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 731 state to active [ns_server:info,2014-08-19T16:54:11.054,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 730 state to active [ns_server:info,2014-08-19T16:54:11.054,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 729 state to active [ns_server:info,2014-08-19T16:54:11.054,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 728 state to active [ns_server:info,2014-08-19T16:54:11.055,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 727 state to active [ns_server:info,2014-08-19T16:54:11.055,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 726 state to active [ns_server:info,2014-08-19T16:54:11.055,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 725 state to active [ns_server:info,2014-08-19T16:54:11.055,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 724 state to active [ns_server:info,2014-08-19T16:54:11.056,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 723 state to active [ns_server:info,2014-08-19T16:54:11.056,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 722 state to active [ns_server:info,2014-08-19T16:54:11.056,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 721 state to active [ns_server:info,2014-08-19T16:54:11.056,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 720 state to active [ns_server:info,2014-08-19T16:54:11.057,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 719 state to active [ns_server:info,2014-08-19T16:54:11.057,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 718 state to active [ns_server:info,2014-08-19T16:54:11.057,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 717 state to active [ns_server:info,2014-08-19T16:54:11.057,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 716 state to active [ns_server:info,2014-08-19T16:54:11.057,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 715 state to active [ns_server:info,2014-08-19T16:54:11.058,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 714 state to active [ns_server:info,2014-08-19T16:54:11.058,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 713 state to active [ns_server:info,2014-08-19T16:54:11.058,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 712 state to active 
[ns_server:info,2014-08-19T16:54:11.058,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 711 state to active [ns_server:info,2014-08-19T16:54:11.059,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 710 state to active [ns_server:info,2014-08-19T16:54:11.059,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 709 state to active [ns_server:info,2014-08-19T16:54:11.059,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 708 state to active [ns_server:info,2014-08-19T16:54:11.059,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 707 state to active [ns_server:info,2014-08-19T16:54:11.060,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 706 state to active [ns_server:info,2014-08-19T16:54:11.060,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 705 state to active [ns_server:info,2014-08-19T16:54:11.060,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 704 state to active [ns_server:info,2014-08-19T16:54:11.060,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 703 state to active [ns_server:info,2014-08-19T16:54:11.061,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 702 state to active [ns_server:info,2014-08-19T16:54:11.061,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 701 state to active [ns_server:info,2014-08-19T16:54:11.061,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 700 state to active [ns_server:info,2014-08-19T16:54:11.061,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 699 state to active [ns_server:info,2014-08-19T16:54:11.062,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 698 state to active [ns_server:info,2014-08-19T16:54:11.062,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 697 state to active [ns_server:info,2014-08-19T16:54:11.062,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 696 state to active [ns_server:info,2014-08-19T16:54:11.062,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 695 state to active [ns_server:info,2014-08-19T16:54:11.063,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 694 state to active [ns_server:info,2014-08-19T16:54:11.063,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 693 state to active [ns_server:info,2014-08-19T16:54:11.063,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 692 state to active [ns_server:info,2014-08-19T16:54:11.063,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 691 state to active [ns_server:info,2014-08-19T16:54:11.063,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 690 state to active [ns_server:info,2014-08-19T16:54:11.064,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 689 state to active [ns_server:info,2014-08-19T16:54:11.064,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 688 state to active [ns_server:info,2014-08-19T16:54:11.064,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 687 state to active 
[ns_server:info,2014-08-19T16:54:11.065,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 686 state to active [ns_server:info,2014-08-19T16:54:11.065,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 685 state to active [ns_server:info,2014-08-19T16:54:11.065,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 684 state to active [ns_server:info,2014-08-19T16:54:11.065,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 683 state to active [ns_server:info,2014-08-19T16:54:11.066,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 682 state to active [ns_server:info,2014-08-19T16:54:11.066,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 681 state to active [ns_server:info,2014-08-19T16:54:11.066,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 680 state to active [ns_server:info,2014-08-19T16:54:11.066,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 679 state to active [ns_server:info,2014-08-19T16:54:11.067,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 678 state to active [ns_server:info,2014-08-19T16:54:11.067,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 677 state to active [ns_server:info,2014-08-19T16:54:11.067,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 676 state to active [ns_server:info,2014-08-19T16:54:11.068,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 675 state to active [ns_server:info,2014-08-19T16:54:11.068,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 674 state to active [ns_server:info,2014-08-19T16:54:11.068,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 673 state to active [ns_server:info,2014-08-19T16:54:11.069,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 672 state to active [ns_server:info,2014-08-19T16:54:11.073,ns_1@10.242.238.90:<0.6227.1>:ns_memcached:do_handle_call:527]Changed vbucket 671 state to active [ns_server:info,2014-08-19T16:54:11.073,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 670 state to active [ns_server:info,2014-08-19T16:54:11.074,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 669 state to active [ns_server:info,2014-08-19T16:54:11.074,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 668 state to active [ns_server:info,2014-08-19T16:54:11.075,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 667 state to active [ns_server:info,2014-08-19T16:54:11.076,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 666 state to active [ns_server:info,2014-08-19T16:54:11.077,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 665 state to active [ns_server:info,2014-08-19T16:54:11.079,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 664 state to active [ns_server:info,2014-08-19T16:54:11.080,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 663 state to active [ns_server:info,2014-08-19T16:54:11.080,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 662 state to active 
[ns_server:info,2014-08-19T16:54:11.081,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 661 state to active [ns_server:info,2014-08-19T16:54:11.081,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 660 state to active [ns_server:info,2014-08-19T16:54:11.082,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 659 state to active [ns_server:info,2014-08-19T16:54:11.082,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 658 state to active [ns_server:info,2014-08-19T16:54:11.083,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 657 state to active [ns_server:info,2014-08-19T16:54:11.083,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 656 state to active [ns_server:info,2014-08-19T16:54:11.083,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 655 state to active [ns_server:info,2014-08-19T16:54:11.084,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 654 state to active [ns_server:info,2014-08-19T16:54:11.084,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 653 state to active [ns_server:info,2014-08-19T16:54:11.089,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 652 state to active [ns_server:info,2014-08-19T16:54:11.089,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 651 state to active [ns_server:info,2014-08-19T16:54:11.089,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 650 state to active [ns_server:info,2014-08-19T16:54:11.090,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 649 state to active [ns_server:info,2014-08-19T16:54:11.090,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 648 state to active [ns_server:info,2014-08-19T16:54:11.090,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 647 state to active [ns_server:info,2014-08-19T16:54:11.090,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 646 state to active [ns_server:info,2014-08-19T16:54:11.091,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 645 state to active [ns_server:info,2014-08-19T16:54:11.091,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 644 state to active [ns_server:info,2014-08-19T16:54:11.091,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 643 state to active [ns_server:info,2014-08-19T16:54:11.091,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 642 state to active [ns_server:info,2014-08-19T16:54:11.092,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 641 state to active [ns_server:info,2014-08-19T16:54:11.092,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 640 state to active [ns_server:info,2014-08-19T16:54:11.092,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 639 state to active [ns_server:info,2014-08-19T16:54:11.092,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 638 state to active [ns_server:info,2014-08-19T16:54:11.092,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 637 state to active 
[ns_server:info,2014-08-19T16:54:11.093,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 636 state to active [ns_server:info,2014-08-19T16:54:11.093,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 635 state to active [ns_server:info,2014-08-19T16:54:11.094,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 634 state to active [ns_server:info,2014-08-19T16:54:11.094,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 633 state to active [ns_server:info,2014-08-19T16:54:11.094,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 632 state to active [ns_server:info,2014-08-19T16:54:11.094,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 631 state to active [ns_server:info,2014-08-19T16:54:11.095,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 630 state to active [ns_server:info,2014-08-19T16:54:11.095,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 629 state to active [ns_server:info,2014-08-19T16:54:11.095,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 628 state to active [ns_server:info,2014-08-19T16:54:11.095,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 627 state to active [ns_server:info,2014-08-19T16:54:11.095,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 626 state to active [ns_server:info,2014-08-19T16:54:11.096,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 625 state to active [ns_server:info,2014-08-19T16:54:11.096,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 624 state to active [ns_server:info,2014-08-19T16:54:11.096,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 623 state to active [ns_server:info,2014-08-19T16:54:11.096,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 622 state to active [ns_server:info,2014-08-19T16:54:11.097,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 621 state to active [ns_server:info,2014-08-19T16:54:11.097,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 620 state to active [ns_server:info,2014-08-19T16:54:11.097,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 619 state to active [ns_server:info,2014-08-19T16:54:11.097,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 618 state to active [ns_server:info,2014-08-19T16:54:11.098,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 617 state to active [ns_server:info,2014-08-19T16:54:11.098,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 616 state to active [ns_server:info,2014-08-19T16:54:11.098,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 615 state to active [ns_server:info,2014-08-19T16:54:11.098,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 614 state to active [ns_server:info,2014-08-19T16:54:11.099,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 613 state to active [ns_server:info,2014-08-19T16:54:11.099,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 612 state to active 
[ns_server:info,2014-08-19T16:54:11.099,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 611 state to active [ns_server:info,2014-08-19T16:54:11.099,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 610 state to active [ns_server:info,2014-08-19T16:54:11.100,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 609 state to active [ns_server:info,2014-08-19T16:54:11.100,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 608 state to active [ns_server:info,2014-08-19T16:54:11.100,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 607 state to active [ns_server:info,2014-08-19T16:54:11.100,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 606 state to active [ns_server:info,2014-08-19T16:54:11.101,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 605 state to active [ns_server:info,2014-08-19T16:54:11.101,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 604 state to active [ns_server:info,2014-08-19T16:54:11.101,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 603 state to active [ns_server:info,2014-08-19T16:54:11.101,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 602 state to active [ns_server:info,2014-08-19T16:54:11.102,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 601 state to active [ns_server:info,2014-08-19T16:54:11.102,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 600 state to active [ns_server:info,2014-08-19T16:54:11.102,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 599 state to active [ns_server:info,2014-08-19T16:54:11.102,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 598 state to active [ns_server:info,2014-08-19T16:54:11.103,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 597 state to active [ns_server:info,2014-08-19T16:54:11.103,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 596 state to active [ns_server:info,2014-08-19T16:54:11.103,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 595 state to active [ns_server:info,2014-08-19T16:54:11.103,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 594 state to active [ns_server:info,2014-08-19T16:54:11.104,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 593 state to active [ns_server:info,2014-08-19T16:54:11.104,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 592 state to active [ns_server:info,2014-08-19T16:54:11.104,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 591 state to active [ns_server:info,2014-08-19T16:54:11.104,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 590 state to active [ns_server:info,2014-08-19T16:54:11.105,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 589 state to active [ns_server:info,2014-08-19T16:54:11.105,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 588 state to active [ns_server:info,2014-08-19T16:54:11.105,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 587 state to active 
[ns_server:info,2014-08-19T16:54:11.105,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 586 state to active [ns_server:info,2014-08-19T16:54:11.106,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 585 state to active [ns_server:info,2014-08-19T16:54:11.106,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 584 state to active [ns_server:info,2014-08-19T16:54:11.106,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 583 state to active [ns_server:info,2014-08-19T16:54:11.106,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 582 state to active [ns_server:info,2014-08-19T16:54:11.107,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 581 state to active [ns_server:info,2014-08-19T16:54:11.107,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 580 state to active [ns_server:info,2014-08-19T16:54:11.107,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 579 state to active [ns_server:info,2014-08-19T16:54:11.107,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 578 state to active [ns_server:info,2014-08-19T16:54:11.108,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 577 state to active [ns_server:info,2014-08-19T16:54:11.108,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 576 state to active [ns_server:info,2014-08-19T16:54:11.108,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 575 state to active [ns_server:info,2014-08-19T16:54:11.108,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 574 state to active [ns_server:info,2014-08-19T16:54:11.109,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 573 state to active [ns_server:info,2014-08-19T16:54:11.109,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 572 state to active [ns_server:info,2014-08-19T16:54:11.109,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 571 state to active [ns_server:info,2014-08-19T16:54:11.109,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 570 state to active [ns_server:info,2014-08-19T16:54:11.110,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 569 state to active [ns_server:info,2014-08-19T16:54:11.110,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 568 state to active [ns_server:info,2014-08-19T16:54:11.110,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 567 state to active [ns_server:info,2014-08-19T16:54:11.111,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 566 state to active [ns_server:info,2014-08-19T16:54:11.111,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 565 state to active [ns_server:info,2014-08-19T16:54:11.111,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 564 state to active [ns_server:info,2014-08-19T16:54:11.112,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 563 state to active [ns_server:info,2014-08-19T16:54:11.112,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 562 state to active 
[ns_server:info,2014-08-19T16:54:11.112,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 561 state to active [ns_server:info,2014-08-19T16:54:11.112,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 560 state to active [ns_server:info,2014-08-19T16:54:11.113,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 559 state to active [ns_server:info,2014-08-19T16:54:11.113,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 558 state to active [ns_server:info,2014-08-19T16:54:11.113,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 557 state to active [ns_server:info,2014-08-19T16:54:11.114,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 556 state to active [ns_server:info,2014-08-19T16:54:11.114,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 555 state to active [ns_server:info,2014-08-19T16:54:11.114,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 554 state to active [ns_server:info,2014-08-19T16:54:11.114,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 553 state to active [ns_server:info,2014-08-19T16:54:11.115,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 552 state to active [ns_server:info,2014-08-19T16:54:11.115,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 551 state to active [ns_server:info,2014-08-19T16:54:11.115,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 550 state to active [ns_server:info,2014-08-19T16:54:11.115,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 549 state to active [ns_server:info,2014-08-19T16:54:11.116,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 548 state to active [ns_server:info,2014-08-19T16:54:11.116,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 547 state to active [ns_server:info,2014-08-19T16:54:11.116,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 546 state to active [ns_server:info,2014-08-19T16:54:11.116,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 545 state to active [ns_server:info,2014-08-19T16:54:11.117,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 544 state to active [ns_server:info,2014-08-19T16:54:11.117,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 543 state to active [ns_server:info,2014-08-19T16:54:11.117,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 542 state to active [ns_server:info,2014-08-19T16:54:11.117,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 541 state to active [ns_server:info,2014-08-19T16:54:11.118,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 540 state to active [ns_server:info,2014-08-19T16:54:11.118,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 539 state to active [ns_server:info,2014-08-19T16:54:11.118,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 538 state to active [ns_server:info,2014-08-19T16:54:11.118,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 537 state to active 
[ns_server:info,2014-08-19T16:54:11.119,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 536 state to active [ns_server:info,2014-08-19T16:54:11.119,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 535 state to active [ns_server:info,2014-08-19T16:54:11.119,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 534 state to active [ns_server:info,2014-08-19T16:54:11.119,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 533 state to active [ns_server:info,2014-08-19T16:54:11.120,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 532 state to active [ns_server:info,2014-08-19T16:54:11.120,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 531 state to active [ns_server:info,2014-08-19T16:54:11.120,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 530 state to active [ns_server:info,2014-08-19T16:54:11.120,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 529 state to active [ns_server:info,2014-08-19T16:54:11.121,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 528 state to active [ns_server:info,2014-08-19T16:54:11.121,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 527 state to active [ns_server:info,2014-08-19T16:54:11.121,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 526 state to active [ns_server:info,2014-08-19T16:54:11.121,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 525 state to active [ns_server:info,2014-08-19T16:54:11.121,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 524 state to active [ns_server:info,2014-08-19T16:54:11.122,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 523 state to active [ns_server:info,2014-08-19T16:54:11.122,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 522 state to active [ns_server:info,2014-08-19T16:54:11.122,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 521 state to active [ns_server:info,2014-08-19T16:54:11.122,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 520 state to active [ns_server:info,2014-08-19T16:54:11.123,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 519 state to active [ns_server:info,2014-08-19T16:54:11.123,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 518 state to active [ns_server:info,2014-08-19T16:54:11.123,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 517 state to active [ns_server:info,2014-08-19T16:54:11.123,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 516 state to active [ns_server:info,2014-08-19T16:54:11.124,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 515 state to active [ns_server:info,2014-08-19T16:54:11.124,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 514 state to active [ns_server:info,2014-08-19T16:54:11.124,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 513 state to active [ns_server:info,2014-08-19T16:54:11.125,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 512 state to active 
[ns_server:info,2014-08-19T16:54:11.125,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 426 state to replica [ns_server:info,2014-08-19T16:54:11.125,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 425 state to replica [ns_server:info,2014-08-19T16:54:11.125,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 424 state to replica [ns_server:info,2014-08-19T16:54:11.125,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 423 state to replica [ns_server:info,2014-08-19T16:54:11.126,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 422 state to replica [ns_server:info,2014-08-19T16:54:11.126,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 421 state to replica [ns_server:info,2014-08-19T16:54:11.126,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 420 state to replica [ns_server:info,2014-08-19T16:54:11.127,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 419 state to replica [ns_server:info,2014-08-19T16:54:11.127,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 418 state to replica [ns_server:info,2014-08-19T16:54:11.127,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 417 state to replica [ns_server:info,2014-08-19T16:54:11.127,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 416 state to replica [ns_server:info,2014-08-19T16:54:11.128,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 415 state to replica [ns_server:info,2014-08-19T16:54:11.128,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 414 state to replica [ns_server:info,2014-08-19T16:54:11.128,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 413 state to replica [ns_server:info,2014-08-19T16:54:11.128,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 412 state to replica [ns_server:info,2014-08-19T16:54:11.129,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 411 state to replica [ns_server:info,2014-08-19T16:54:11.129,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 410 state to replica [ns_server:info,2014-08-19T16:54:11.129,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 409 state to replica [ns_server:info,2014-08-19T16:54:11.129,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 408 state to replica [ns_server:info,2014-08-19T16:54:11.130,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 407 state to replica [ns_server:info,2014-08-19T16:54:11.130,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 406 state to replica [ns_server:info,2014-08-19T16:54:11.130,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 405 state to replica [ns_server:info,2014-08-19T16:54:11.130,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 404 state to replica [ns_server:info,2014-08-19T16:54:11.131,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 403 state to replica [ns_server:info,2014-08-19T16:54:11.131,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 402 state to replica 
[ns_server:info,2014-08-19T16:54:11.131,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 401 state to replica [ns_server:info,2014-08-19T16:54:11.131,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 400 state to replica [ns_server:info,2014-08-19T16:54:11.132,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 399 state to replica [ns_server:info,2014-08-19T16:54:11.132,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 398 state to replica [ns_server:info,2014-08-19T16:54:11.132,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 397 state to replica [ns_server:info,2014-08-19T16:54:11.132,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 396 state to replica [ns_server:info,2014-08-19T16:54:11.133,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 395 state to replica [ns_server:info,2014-08-19T16:54:11.133,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 394 state to replica [ns_server:info,2014-08-19T16:54:11.133,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 393 state to replica [ns_server:info,2014-08-19T16:54:11.133,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 392 state to replica [ns_server:info,2014-08-19T16:54:11.134,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 391 state to replica [ns_server:info,2014-08-19T16:54:11.134,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 390 state to replica [ns_server:info,2014-08-19T16:54:11.134,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 389 state to replica [ns_server:info,2014-08-19T16:54:11.134,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 388 state to replica [ns_server:info,2014-08-19T16:54:11.135,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 387 state to replica [ns_server:info,2014-08-19T16:54:11.135,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 386 state to replica [ns_server:info,2014-08-19T16:54:11.135,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 385 state to replica [ns_server:info,2014-08-19T16:54:11.135,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 384 state to replica [ns_server:info,2014-08-19T16:54:11.136,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 383 state to replica [ns_server:info,2014-08-19T16:54:11.136,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 382 state to replica [ns_server:info,2014-08-19T16:54:11.136,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 381 state to replica [ns_server:info,2014-08-19T16:54:11.136,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 380 state to replica [ns_server:info,2014-08-19T16:54:11.137,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 379 state to replica [ns_server:info,2014-08-19T16:54:11.137,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 378 state to replica [ns_server:info,2014-08-19T16:54:11.137,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 377 state to replica 
[ns_server:info,2014-08-19T16:54:11.138,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 376 state to replica [ns_server:info,2014-08-19T16:54:11.138,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 375 state to replica [ns_server:info,2014-08-19T16:54:11.138,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 374 state to replica [ns_server:info,2014-08-19T16:54:11.139,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 373 state to replica [ns_server:info,2014-08-19T16:54:11.139,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 372 state to replica [ns_server:info,2014-08-19T16:54:11.139,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 371 state to replica [ns_server:info,2014-08-19T16:54:11.140,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 370 state to replica [ns_server:info,2014-08-19T16:54:11.140,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 369 state to replica [ns_server:info,2014-08-19T16:54:11.140,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 368 state to replica [ns_server:info,2014-08-19T16:54:11.141,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 367 state to replica [ns_server:info,2014-08-19T16:54:11.141,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 366 state to replica [ns_server:info,2014-08-19T16:54:11.141,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 365 state to replica [ns_server:info,2014-08-19T16:54:11.143,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 364 state to replica [ns_server:info,2014-08-19T16:54:11.143,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 363 state to replica [ns_server:info,2014-08-19T16:54:11.143,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 362 state to replica [ns_server:info,2014-08-19T16:54:11.144,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 361 state to replica [ns_server:info,2014-08-19T16:54:11.144,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 360 state to replica [ns_server:info,2014-08-19T16:54:11.144,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 359 state to replica [ns_server:info,2014-08-19T16:54:11.144,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 358 state to replica [ns_server:info,2014-08-19T16:54:11.145,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 357 state to replica [ns_server:info,2014-08-19T16:54:11.145,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 356 state to replica [ns_server:info,2014-08-19T16:54:11.145,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 355 state to replica [ns_server:info,2014-08-19T16:54:11.145,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 354 state to replica [ns_server:info,2014-08-19T16:54:11.146,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 353 state to replica [ns_server:info,2014-08-19T16:54:11.146,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 352 state to replica 
[ns_server:info,2014-08-19T16:54:11.146,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 351 state to replica [ns_server:info,2014-08-19T16:54:11.146,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 350 state to replica [ns_server:info,2014-08-19T16:54:11.146,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 349 state to replica [ns_server:info,2014-08-19T16:54:11.147,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 348 state to replica [ns_server:info,2014-08-19T16:54:11.147,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 347 state to replica [ns_server:info,2014-08-19T16:54:11.147,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 346 state to replica [ns_server:info,2014-08-19T16:54:11.147,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 345 state to replica [ns_server:info,2014-08-19T16:54:11.148,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 344 state to replica [ns_server:info,2014-08-19T16:54:11.148,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 343 state to replica [ns_server:info,2014-08-19T16:54:11.148,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 342 state to replica [ns_server:info,2014-08-19T16:54:11.148,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 170 state to replica [ns_server:info,2014-08-19T16:54:11.148,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 169 state to replica [ns_server:info,2014-08-19T16:54:11.149,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 168 state to replica [ns_server:info,2014-08-19T16:54:11.149,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 167 state to replica [ns_server:info,2014-08-19T16:54:11.149,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 166 state to replica [ns_server:info,2014-08-19T16:54:11.149,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 165 state to replica [ns_server:info,2014-08-19T16:54:11.150,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 164 state to replica [ns_server:info,2014-08-19T16:54:11.150,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 163 state to replica [ns_server:info,2014-08-19T16:54:11.150,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 162 state to replica [ns_server:info,2014-08-19T16:54:11.150,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 161 state to replica [ns_server:info,2014-08-19T16:54:11.150,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 160 state to replica [ns_server:info,2014-08-19T16:54:11.151,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 159 state to replica [ns_server:info,2014-08-19T16:54:11.151,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 158 state to replica [ns_server:info,2014-08-19T16:54:11.151,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 157 state to replica [ns_server:info,2014-08-19T16:54:11.151,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 156 state to replica 
[ns_server:info,2014-08-19T16:54:11.152,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 155 state to replica [ns_server:info,2014-08-19T16:54:11.152,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 154 state to replica [ns_server:info,2014-08-19T16:54:11.152,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 153 state to replica [ns_server:info,2014-08-19T16:54:11.152,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 152 state to replica [ns_server:info,2014-08-19T16:54:11.152,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 151 state to replica [ns_server:info,2014-08-19T16:54:11.153,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 150 state to replica [ns_server:info,2014-08-19T16:54:11.153,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 149 state to replica [ns_server:info,2014-08-19T16:54:11.153,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 148 state to replica [ns_server:info,2014-08-19T16:54:11.153,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 147 state to replica [ns_server:info,2014-08-19T16:54:11.154,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 146 state to replica [ns_server:info,2014-08-19T16:54:11.154,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 145 state to replica [ns_server:info,2014-08-19T16:54:11.154,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 144 state to replica [ns_server:info,2014-08-19T16:54:11.154,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 143 state to replica [ns_server:info,2014-08-19T16:54:11.154,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 142 state to replica [ns_server:info,2014-08-19T16:54:11.155,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 141 state to replica [ns_server:info,2014-08-19T16:54:11.155,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 140 state to replica [ns_server:info,2014-08-19T16:54:11.155,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 139 state to replica [ns_server:info,2014-08-19T16:54:11.155,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 138 state to replica [ns_server:info,2014-08-19T16:54:11.155,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 137 state to replica [ns_server:info,2014-08-19T16:54:11.156,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 136 state to replica [ns_server:info,2014-08-19T16:54:11.156,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 135 state to replica [ns_server:info,2014-08-19T16:54:11.156,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 134 state to replica [ns_server:info,2014-08-19T16:54:11.156,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 133 state to replica [ns_server:info,2014-08-19T16:54:11.157,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 132 state to replica [ns_server:info,2014-08-19T16:54:11.157,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 131 state to replica 
[ns_server:info,2014-08-19T16:54:11.157,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 130 state to replica [ns_server:info,2014-08-19T16:54:11.158,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 129 state to replica [ns_server:info,2014-08-19T16:54:11.158,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 128 state to replica [ns_server:info,2014-08-19T16:54:11.158,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 127 state to replica [ns_server:info,2014-08-19T16:54:11.158,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 126 state to replica [ns_server:info,2014-08-19T16:54:11.159,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 125 state to replica [ns_server:info,2014-08-19T16:54:11.159,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 124 state to replica [ns_server:info,2014-08-19T16:54:11.159,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 123 state to replica [ns_server:info,2014-08-19T16:54:11.159,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 122 state to replica [ns_server:info,2014-08-19T16:54:11.160,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 121 state to replica [ns_server:info,2014-08-19T16:54:11.160,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 120 state to replica [ns_server:info,2014-08-19T16:54:11.160,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 119 state to replica [ns_server:info,2014-08-19T16:54:11.160,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 118 state to replica [ns_server:info,2014-08-19T16:54:11.160,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 117 state to replica [ns_server:info,2014-08-19T16:54:11.161,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 116 state to replica [ns_server:info,2014-08-19T16:54:11.161,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 115 state to replica [ns_server:info,2014-08-19T16:54:11.161,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 114 state to replica [ns_server:info,2014-08-19T16:54:11.161,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 113 state to replica [ns_server:info,2014-08-19T16:54:11.161,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 112 state to replica [ns_server:info,2014-08-19T16:54:11.162,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 111 state to replica [ns_server:info,2014-08-19T16:54:11.162,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 110 state to replica [ns_server:info,2014-08-19T16:54:11.162,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 109 state to replica [ns_server:info,2014-08-19T16:54:11.162,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 108 state to replica [ns_server:info,2014-08-19T16:54:11.163,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 107 state to replica [ns_server:info,2014-08-19T16:54:11.163,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 106 state to replica 
[ns_server:info,2014-08-19T16:54:11.163,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 105 state to replica [ns_server:info,2014-08-19T16:54:11.163,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 104 state to replica [ns_server:info,2014-08-19T16:54:11.163,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 103 state to replica [ns_server:info,2014-08-19T16:54:11.164,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 102 state to replica [ns_server:info,2014-08-19T16:54:11.164,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 101 state to replica [ns_server:info,2014-08-19T16:54:11.164,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 100 state to replica [ns_server:info,2014-08-19T16:54:11.164,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 99 state to replica [ns_server:info,2014-08-19T16:54:11.164,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 98 state to replica [ns_server:info,2014-08-19T16:54:11.165,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 97 state to replica [ns_server:info,2014-08-19T16:54:11.165,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 96 state to replica [ns_server:info,2014-08-19T16:54:11.165,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 95 state to replica [ns_server:info,2014-08-19T16:54:11.165,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 94 state to replica [ns_server:info,2014-08-19T16:54:11.166,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 93 state to replica [ns_server:info,2014-08-19T16:54:11.166,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 92 state to replica [ns_server:info,2014-08-19T16:54:11.166,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 91 state to replica [ns_server:info,2014-08-19T16:54:11.166,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 90 state to replica [ns_server:info,2014-08-19T16:54:11.167,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 89 state to replica [ns_server:info,2014-08-19T16:54:11.167,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 88 state to replica [ns_server:info,2014-08-19T16:54:11.167,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 87 state to replica [ns_server:info,2014-08-19T16:54:11.167,ns_1@10.242.238.90:<0.6228.1>:ns_memcached:do_handle_call:527]Changed vbucket 86 state to replica [ns_server:debug,2014-08-19T16:54:11.199,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 751. Nacking mccouch update. [views:debug,2014-08-19T16:54:11.199,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/751. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:11.199,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",751,active,0} [ns_server:debug,2014-08-19T16:54:11.199,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,983,736,672,608,970,957,710,646,582,1021,976,944,761,1008,995,963,748, 716,684,652,620,588,556,982,950,767,1014,969,754,722,690,658,626,594,562, 1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007,994,962,981,949, 766,734,702,670,638,606,574,1013,968,753,1000,987,955,740,708,676,644,612, 580,548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980,948, 765,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941,758,726, 694,662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572,1011, 998,966,751,985,953,738,706,674,642,610,578,1017,972,940,757,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,1010,965,718,654,590,952,1016, 939,756,692,628,564,1003,990,977,730,666,602,964,951,704,640,576,1015,938, 755,1002,989,742,678,614,550] [ns_server:info,2014-08-19T16:54:11.204,ns_1@10.242.238.90:tap_replication_manager-tiles<0.6207.1>:tap_replication_manager:start_child:172]Starting replication from 'ns_1@10.242.238.91' for [938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956, 957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975, 976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994, 995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010, 1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023] [ns_server:info,2014-08-19T16:54:11.205,ns_1@10.242.238.90:tap_replication_manager-tiles<0.6207.1>:tap_replication_manager:start_child:172]Starting replication from 'ns_1@10.242.238.89' for [342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360, 361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379, 380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398, 399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417, 418,419,420,421,422,423,424,425,426] [error_logger:info,2014-08-19T16:54:11.205,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-tiles'} started: [{pid,<0.6362.1>}, {name, {new_child_id, [938,939,940,941,942,943,944,945,946,947,948, 949,950,951,952,953,954,955,956,957,958,959, 960,961,962,963,964,965,966,967,968,969,970, 971,972,973,974,975,976,977,978,979,980,981, 982,983,984,985,986,987,988,989,990,991,992, 993,994,995,996,997,998,999,1000,1001,1002, 1003,1004,1005,1006,1007,1008,1009,1010,1011, 1012,1013,1014,1015,1016,1017,1018,1019,1020, 1021,1022,1023], 'ns_1@10.242.238.91'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets, #Fun}, {username,"tiles"}, {password,get_from_config}, {vbuckets, [938,939,940,941,942,943,944,945,946,947, 948,949,950,951,952,953,954,955,956,957, 958,959,960,961,962,963,964,965,966,967, 968,969,970,971,972,973,974,975,976,977, 978,979,980,981,982,983,984,985,986,987, 988,989,990,991,992,993,994,995,996,997, 998,999,1000,1001,1002,1003,1004,1005, 1006,1007,1008,1009,1010,1011,1012,1013, 
1014,1015,1016,1017,1018,1019,1020,1021, 1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:54:11.207,ns_1@10.242.238.90:tap_replication_manager-tiles<0.6207.1>:tap_replication_manager:start_child:172]Starting replication from 'ns_1@10.242.238.88' for [86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107, 108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126, 127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145, 146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164, 165,166,167,168,169,170] [error_logger:info,2014-08-19T16:54:11.207,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-tiles'} started: [{pid,<0.6363.1>}, {name, {new_child_id, [342,343,344,345,346,347,348,349,350,351,352, 353,354,355,356,357,358,359,360,361,362,363, 364,365,366,367,368,369,370,371,372,373,374, 375,376,377,378,379,380,381,382,383,384,385, 386,387,388,389,390,391,392,393,394,395,396, 397,398,399,400,401,402,403,404,405,406,407, 408,409,410,411,412,413,414,415,416,417,418, 419,420,421,422,423,424,425,426], 'ns_1@10.242.238.89'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets, #Fun}, {username,"tiles"}, {password,get_from_config}, {vbuckets, [342,343,344,345,346,347,348,349,350,351, 352,353,354,355,356,357,358,359,360,361, 362,363,364,365,366,367,368,369,370,371, 372,373,374,375,376,377,378,379,380,381, 382,383,384,385,386,387,388,389,390,391, 392,393,394,395,396,397,398,399,400,401, 402,403,404,405,406,407,408,409,410,411, 412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [error_logger:info,2014-08-19T16:54:11.209,ns_1@10.242.238.90:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'ns_vbm_new_sup-tiles'} started: [{pid,<0.6364.1>}, {name, {new_child_id, [86,87,88,89,90,91,92,93,94,95,96,97,98,99,100, 101,102,103,104,105,106,107,108,109,110,111, 112,113,114,115,116,117,118,119,120,121,122, 123,124,125,126,127,128,129,130,131,132,133, 134,135,136,137,138,139,140,141,142,143,144, 145,146,147,148,149,150,151,152,153,154,155, 156,157,158,159,160,161,162,163,164,165,166, 167,168,169,170], 'ns_1@10.242.238.88'}}, {mfargs, {ebucketmigrator_srv,start_link, [{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets, #Fun}, {username,"tiles"}, {password,get_from_config}, {vbuckets, [86,87,88,89,90,91,92,93,94,95,96,97,98, 99,100,101,102,103,104,105,106,107,108, 109,110,111,112,113,114,115,116,117,118, 119,120,121,122,123,124,125,126,127,128, 129,130,131,132,133,134,135,136,137,138, 139,140,141,142,143,144,145,146,147,148, 149,150,151,152,153,154,155,156,157,158, 159,160,161,162,163,164,165,166,167,168, 169,170]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]]}}, {restart_type,temporary}, {shutdown,60000}, {child_type,worker}] [ns_server:info,2014-08-19T16:54:11.209,ns_1@10.242.238.90:ns_memcached-tiles<0.6205.1>:ns_memcached:handle_call:247]Enabling traffic to bucket "tiles" 
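The three PROGRESS REPORTs above show the 'ns_vbm_new_sup-tiles' supervisor starting one ebucketmigrator_srv worker per source node: vbuckets 938-1023 from 'ns_1@10.242.238.91', 342-426 from 'ns_1@10.242.238.89' and 86-170 from 'ns_1@10.242.238.88', matching the tap_replication_manager "Starting replication from ..." entries. A quick cross-check that every requested vbucket later reports an "Initial stream for vbucket N" line is sketched below, under the same assumptions as the earlier snippet (one entry per line in the raw file, placeholder file name); it does not separate buckets, which is acceptable here because this excerpt only covers "tiles".

```python
import re

# "Starting replication from 'ns_1@...' for [v1,v2,...]" entries, as logged
# by tap_replication_manager above; the vbucket list may wrap across lines.
REQUESTED = re.compile(
    r"Starting replication from '(?P<source>[^']+)' for\s*\[(?P<vbuckets>[^\]]*)\]"
)
STREAMED = re.compile(r"Initial stream for vbucket (\d+)")

def check_streams(text):
    """Compare the vbuckets requested from each source node against the
    vbuckets that actually report an initial tap stream."""
    requested = {}
    for m in REQUESTED.finditer(text):
        vbs = {int(v) for v in m.group("vbuckets").split(",") if v.strip()}
        requested.setdefault(m.group("source"), set()).update(vbs)

    streamed = {int(v) for v in STREAMED.findall(text)}

    for source, vbs in sorted(requested.items()):
        missing = sorted(vbs - streamed)
        print(f"{source}: {len(vbs)} requested, "
              f"{len(vbs) - len(missing)} streamed, missing: {missing}")

if __name__ == "__main__":
    # Placeholder path; run this against the full debug log, not an excerpt.
    with open("ns_server.debug.log") as f:
        check_streams(f.read())
```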
[ns_server:info,2014-08-19T16:54:11.209,ns_1@10.242.238.90:ns_memcached-tiles<0.6205.1>:ns_memcached:handle_call:251]Bucket "tiles" marked as warmed in 1 seconds [ns_server:debug,2014-08-19T16:54:11.248,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:54:11.256,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105, 106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121, 122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137, 138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153, 154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169, 170]}, {checkpoints,[{86,0}, {87,0}, {88,0}, {89,0}, {90,0}, {91,0}, {92,0}, {93,0}, {94,0}, {95,0}, {96,0}, {97,0}, {98,0}, {99,0}, {100,0}, {101,0}, {102,0}, {103,0}, {104,0}, {105,0}, {106,0}, {107,0}, {108,0}, {109,0}, {110,0}, {111,0}, {112,0}, {113,0}, {114,0}, {115,0}, {116,0}, {117,0}, {118,0}, {119,0}, {120,0}, {121,0}, {122,0}, {123,0}, {124,0}, {125,0}, {126,0}, {127,0}, {128,0}, {129,0}, {130,0}, {131,0}, {132,0}, {133,0}, {134,0}, {135,0}, {136,0}, {137,0}, {138,0}, {139,0}, {140,0}, {141,0}, {142,0}, {143,0}, {144,0}, {145,0}, {146,0}, {147,0}, {148,0}, {149,0}, {150,0}, {151,0}, {152,0}, {153,0}, {154,0}, {155,0}, {156,0}, {157,0}, {158,0}, {159,0}, {160,0}, {161,0}, {162,0}, {163,0}, {164,0}, {165,0}, {166,0}, {167,0}, {168,0}, {169,0}, {170,0}]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets,#Fun}, {username,"tiles"}, {password,get_from_config}, {vbuckets,[86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104, 105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120, 121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136, 137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152, 153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168, 169,170]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]} [rebalance:debug,2014-08-19T16:54:11.258,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.6365.1> [rebalance:info,2014-08-19T16:54:11.259,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 86 [rebalance:info,2014-08-19T16:54:11.259,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 87 [rebalance:info,2014-08-19T16:54:11.259,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 88 [rebalance:info,2014-08-19T16:54:11.259,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 89 [rebalance:info,2014-08-19T16:54:11.259,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 90 [rebalance:info,2014-08-19T16:54:11.260,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 91 [rebalance:info,2014-08-19T16:54:11.260,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 92 [rebalance:info,2014-08-19T16:54:11.260,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 93 
[rebalance:info,2014-08-19T16:54:11.260,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 94 [rebalance:info,2014-08-19T16:54:11.260,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 95 [rebalance:info,2014-08-19T16:54:11.260,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 96 [rebalance:info,2014-08-19T16:54:11.260,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 97 [rebalance:info,2014-08-19T16:54:11.260,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 98 [rebalance:info,2014-08-19T16:54:11.261,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 99 [rebalance:info,2014-08-19T16:54:11.261,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 100 [rebalance:info,2014-08-19T16:54:11.261,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 101 [rebalance:info,2014-08-19T16:54:11.261,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 102 [rebalance:info,2014-08-19T16:54:11.261,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 103 [rebalance:info,2014-08-19T16:54:11.261,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 104 [rebalance:info,2014-08-19T16:54:11.261,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 105 [rebalance:info,2014-08-19T16:54:11.261,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 106 [rebalance:info,2014-08-19T16:54:11.261,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 107 [rebalance:info,2014-08-19T16:54:11.261,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 108 [rebalance:info,2014-08-19T16:54:11.262,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 109 [rebalance:info,2014-08-19T16:54:11.262,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 110 [rebalance:info,2014-08-19T16:54:11.262,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 111 [rebalance:info,2014-08-19T16:54:11.262,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 112 [rebalance:info,2014-08-19T16:54:11.262,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 113 [rebalance:info,2014-08-19T16:54:11.262,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 114 [rebalance:info,2014-08-19T16:54:11.262,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 115 [rebalance:info,2014-08-19T16:54:11.263,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 116 [rebalance:info,2014-08-19T16:54:11.263,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 117 [rebalance:info,2014-08-19T16:54:11.263,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream 
for vbucket 118 [rebalance:info,2014-08-19T16:54:11.263,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 119 [rebalance:info,2014-08-19T16:54:11.263,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 120 [rebalance:info,2014-08-19T16:54:11.263,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 121 [rebalance:info,2014-08-19T16:54:11.263,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 122 [rebalance:info,2014-08-19T16:54:11.263,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 123 [rebalance:info,2014-08-19T16:54:11.263,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 124 [rebalance:info,2014-08-19T16:54:11.263,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 125 [rebalance:info,2014-08-19T16:54:11.263,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 126 [rebalance:info,2014-08-19T16:54:11.264,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 127 [rebalance:info,2014-08-19T16:54:11.264,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 128 [rebalance:info,2014-08-19T16:54:11.264,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 129 [rebalance:info,2014-08-19T16:54:11.264,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 130 [ns_server:debug,2014-08-19T16:54:11.264,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:54:11.264,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 131 [rebalance:info,2014-08-19T16:54:11.264,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 132 [rebalance:info,2014-08-19T16:54:11.264,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 133 [rebalance:info,2014-08-19T16:54:11.264,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 134 [rebalance:info,2014-08-19T16:54:11.265,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 135 [rebalance:info,2014-08-19T16:54:11.265,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 136 [rebalance:info,2014-08-19T16:54:11.265,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 137 [rebalance:info,2014-08-19T16:54:11.265,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 138 [rebalance:info,2014-08-19T16:54:11.265,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 139 [rebalance:info,2014-08-19T16:54:11.265,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 140 [rebalance:info,2014-08-19T16:54:11.265,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 141 
[rebalance:info,2014-08-19T16:54:11.266,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 142 [rebalance:info,2014-08-19T16:54:11.266,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 143 [rebalance:info,2014-08-19T16:54:11.266,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 144 [rebalance:info,2014-08-19T16:54:11.266,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 145 [rebalance:info,2014-08-19T16:54:11.266,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 146 [rebalance:info,2014-08-19T16:54:11.267,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 147 [rebalance:info,2014-08-19T16:54:11.267,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 148 [rebalance:info,2014-08-19T16:54:11.267,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 149 [rebalance:info,2014-08-19T16:54:11.267,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 150 [rebalance:info,2014-08-19T16:54:11.267,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 151 [rebalance:info,2014-08-19T16:54:11.267,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 152 [rebalance:info,2014-08-19T16:54:11.267,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 153 [rebalance:info,2014-08-19T16:54:11.267,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 154 [rebalance:info,2014-08-19T16:54:11.268,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 155 [rebalance:info,2014-08-19T16:54:11.268,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 156 [rebalance:info,2014-08-19T16:54:11.268,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 157 [rebalance:info,2014-08-19T16:54:11.268,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 158 [rebalance:info,2014-08-19T16:54:11.268,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 159 [rebalance:info,2014-08-19T16:54:11.267,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953, 954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001, 1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, 1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {checkpoints,[{938,0}, {939,0}, {940,0}, {941,0}, {942,0}, {943,0}, {944,0}, {945,0}, {946,0}, {947,0}, {948,0}, {949,0}, {950,0}, {951,0}, {952,0}, {953,0}, {954,0}, {955,0}, {956,0}, {957,0}, {958,0}, {959,0}, {960,0}, {961,0}, {962,0}, {963,0}, {964,0}, {965,0}, {966,0}, {967,0}, {968,0}, {969,0}, {970,0}, {971,0}, {972,0}, {973,0}, {974,0}, {975,0}, {976,0}, {977,0}, {978,0}, {979,0}, {980,0}, {981,0}, {982,0}, {983,0}, {984,0}, {985,0}, {986,0}, {987,0}, {988,0}, 
{989,0}, {990,0}, {991,0}, {992,0}, {993,0}, {994,0}, {995,0}, {996,0}, {997,0}, {998,0}, {999,0}, {1000,0}, {1001,0}, {1002,0}, {1003,0}, {1004,0}, {1005,0}, {1006,0}, {1007,0}, {1008,0}, {1009,0}, {1010,0}, {1011,0}, {1012,0}, {1013,0}, {1014,0}, {1015,0}, {1016,0}, {1017,0}, {1018,0}, {1019,0}, {1020,0}, {1021,0}, {1022,0}, {1023,0}]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] {{"10.242.238.91",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets,#Fun}, {username,"tiles"}, {password,get_from_config}, {vbuckets,[938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953, 954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969, 970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985, 986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000, 1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013, 1014,1015,1016,1017,1018,1019,1020,1021,1022,1023]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]} [rebalance:info,2014-08-19T16:54:11.268,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 160 [rebalance:info,2014-08-19T16:54:11.268,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 161 [rebalance:info,2014-08-19T16:54:11.269,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 162 [rebalance:info,2014-08-19T16:54:11.269,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 163 [rebalance:info,2014-08-19T16:54:11.269,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 164 [rebalance:debug,2014-08-19T16:54:11.269,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.6366.1> [rebalance:info,2014-08-19T16:54:11.269,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 165 [rebalance:info,2014-08-19T16:54:11.269,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 166 [rebalance:info,2014-08-19T16:54:11.269,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 167 [rebalance:info,2014-08-19T16:54:11.269,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 168 [rebalance:info,2014-08-19T16:54:11.269,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 169 [rebalance:info,2014-08-19T16:54:11.269,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 170 [rebalance:info,2014-08-19T16:54:11.270,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 938 [rebalance:info,2014-08-19T16:54:11.270,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 939 [rebalance:info,2014-08-19T16:54:11.270,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 940 [rebalance:info,2014-08-19T16:54:11.270,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 941 [rebalance:info,2014-08-19T16:54:11.270,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 942 [rebalance:info,2014-08-19T16:54:11.271,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for 
vbucket 943 [rebalance:info,2014-08-19T16:54:11.271,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 944 [rebalance:info,2014-08-19T16:54:11.271,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 945 [rebalance:info,2014-08-19T16:54:11.271,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 946 [rebalance:info,2014-08-19T16:54:11.271,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 947 [rebalance:info,2014-08-19T16:54:11.271,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 948 [rebalance:info,2014-08-19T16:54:11.271,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 949 [rebalance:info,2014-08-19T16:54:11.271,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 950 [rebalance:info,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 951 [rebalance:info,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 952 [ns_server:debug,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 953 [ns_server:debug,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 954 [ns_server:debug,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: replication_ns_1@10.242.238.90 [ns_server:debug,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 955 [ns_server:debug,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 956 [ns_server:debug,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 957 [ns_server:debug,2014-08-19T16:54:11.272,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 958 
[ns_server:debug,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 959 [ns_server:debug,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 960 [ns_server:debug,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 961 [ns_server:debug,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 962 [ns_server:debug,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 963 [ns_server:debug,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 964 [ns_server:debug,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 965 [ns_server:debug,2014-08-19T16:54:11.273,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 966 [ns_server:debug,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 967 [ns_server:debug,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 968 [ns_server:debug,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 969 
[ns_server:debug,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 970 [ns_server:debug,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 971 [ns_server:debug,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 972 [ns_server:debug,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 973 [ns_server:debug,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 974 [ns_server:debug,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.274,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 975 [ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 976 [ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 977 [ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 978 [ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 979 [ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 980 [ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 981 [ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.275,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 982 [ns_server:debug,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 983 [ns_server:debug,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 984 [ns_server:debug,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 985 [rebalance:info,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 986 [ns_server:debug,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 987 [ns_server:debug,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 988 [ns_server:debug,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[rebalance:info,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 989 [ns_server:debug,2014-08-19T16:54:11.276,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 990 [ns_server:debug,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 991 [ns_server:debug,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 992 [ns_server:debug,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 993 [ns_server:debug,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 994 [ns_server:debug,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 995 [ns_server:debug,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.277,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 996 [ns_server:debug,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 997 [ns_server:debug,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 998 [ns_server:debug,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[rebalance:info,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 999 [ns_server:debug,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1000 [ns_server:debug,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357, 358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {checkpoints,[{342,0}, {343,0}, {344,0}, {345,0}, {346,0}, {347,0}, {348,0}, {349,0}, {350,0}, {351,0}, {352,0}, {353,0}, {354,0}, {355,0}, {356,0}, {357,0}, {358,0}, {359,0}, {360,0}, {361,0}, {362,0}, {363,0}, {364,0}, {365,0}, {366,0}, {367,0}, {368,0}, {369,0}, {370,0}, {371,0}, {372,0}, {373,0}, {374,0}, {375,0}, {376,0}, {377,0}, {378,0}, {379,0}, {380,0}, {381,0}, {382,0}, {383,0}, {384,0}, {385,0}, {386,0}, {387,0}, {388,0}, {389,0}, {390,0}, {391,0}, {392,0}, {393,0}, {394,0}, {395,0}, {396,0}, {397,0}, {398,0}, {399,0}, {400,0}, {401,0}, {402,0}, {403,0}, {404,0}, {405,0}, {406,0}, {407,0}, {408,0}, {409,0}, {410,0}, {411,0}, {412,0}, {413,0}, {414,0}, {415,0}, {416,0}, {417,0}, {418,0}, {419,0}, {420,0}, {421,0}, {422,0}, {423,0}, {424,0}, {425,0}, {426,0}]}, {name,<<"replication_ns_1@10.242.238.90">>}, {takeover,false}] {{"10.242.238.89",11209}, {"10.242.238.90",11209}, [{on_not_ready_vbuckets,#Fun}, {username,"tiles"}, {password,get_from_config}, {vbuckets,[342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357, 358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373, 374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389, 390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405, 406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421, 422,423,424,425,426]}, {set_to_pending_state,false}, {takeover,false}, {suffix,"ns_1@10.242.238.90"}]} [ns_server:debug,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1001 [ns_server:debug,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1002 [ns_server:debug,2014-08-19T16:54:11.278,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1003 
[ns_server:debug,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1004 [ns_server:debug,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1005 [ns_server:debug,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:debug,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.6367.1> [ns_server:debug,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1006 [ns_server:debug,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1007 [ns_server:debug,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.279,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1008 [ns_server:debug,2014-08-19T16:54:11.280,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.280,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.280,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1009 [ns_server:debug,2014-08-19T16:54:11.280,ns_1@10.242.238.90:<0.6364.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.280,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1010 [rebalance:info,2014-08-19T16:54:11.280,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1011 [rebalance:info,2014-08-19T16:54:11.280,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1012 [rebalance:info,2014-08-19T16:54:11.280,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1013 [rebalance:info,2014-08-19T16:54:11.280,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1014 [rebalance:info,2014-08-19T16:54:11.280,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 342 [rebalance:info,2014-08-19T16:54:11.280,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1015 
[rebalance:info,2014-08-19T16:54:11.280,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 343 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1016 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 344 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 345 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1017 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 346 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 347 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1018 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 348 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1019 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 349 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1020 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 350 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1021 [rebalance:info,2014-08-19T16:54:11.281,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 351 [rebalance:info,2014-08-19T16:54:11.282,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1022 [rebalance:info,2014-08-19T16:54:11.282,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 352 [rebalance:info,2014-08-19T16:54:11.282,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 1023 [rebalance:info,2014-08-19T16:54:11.282,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 353 [rebalance:info,2014-08-19T16:54:11.282,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 354 [rebalance:info,2014-08-19T16:54:11.282,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 355 [rebalance:info,2014-08-19T16:54:11.282,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 356 [rebalance:info,2014-08-19T16:54:11.282,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 357 [rebalance:info,2014-08-19T16:54:11.282,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 358 
[rebalance:info,2014-08-19T16:54:11.282,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 359 [rebalance:info,2014-08-19T16:54:11.283,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 360 [rebalance:info,2014-08-19T16:54:11.283,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 361 [rebalance:info,2014-08-19T16:54:11.283,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 362 [rebalance:info,2014-08-19T16:54:11.283,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 363 [rebalance:info,2014-08-19T16:54:11.283,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 364 [rebalance:info,2014-08-19T16:54:11.283,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 365 [rebalance:info,2014-08-19T16:54:11.283,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 366 [rebalance:info,2014-08-19T16:54:11.283,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 367 [rebalance:info,2014-08-19T16:54:11.283,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 368 [rebalance:info,2014-08-19T16:54:11.283,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 369 [rebalance:info,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 370 [rebalance:info,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 371 [rebalance:info,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 372 [rebalance:info,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 373 [rebalance:info,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 374 [rebalance:info,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 375 [rebalance:info,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 376 [rebalance:info,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 377 [ns_server:debug,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 378 [ns_server:debug,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 379 [ns_server:debug,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.284,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream 
for vbucket 380 [ns_server:debug,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 381 [rebalance:info,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 382 [ns_server:debug,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 383 [ns_server:debug,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 384 [ns_server:debug,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 385 [ns_server:debug,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 386 [ns_server:debug,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 387 [ns_server:debug,2014-08-19T16:54:11.285,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 388 [ns_server:debug,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 389 [ns_server:debug,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 390 [ns_server:debug,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for 
vbucket 391 [ns_server:debug,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 392 [ns_server:debug,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 393 [ns_server:debug,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 394 [ns_server:debug,2014-08-19T16:54:11.286,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 395 [ns_server:debug,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 396 [ns_server:debug,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 397 [ns_server:debug,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 398 [ns_server:debug,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 399 [ns_server:debug,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 400 [ns_server:debug,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 401 [ns_server:debug,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 
402 [ns_server:debug,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.287,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 403 [ns_server:debug,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 404 [ns_server:debug,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 405 [ns_server:debug,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 406 [ns_server:debug,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 407 [ns_server:debug,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 408 [ns_server:debug,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 409 [ns_server:debug,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 410 [ns_server:debug,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 411 [ns_server:debug,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.288,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[rebalance:info,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 412 [ns_server:debug,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 413 [ns_server:debug,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 414 [ns_server:debug,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 415 [ns_server:debug,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 416 [ns_server:debug,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 417 [ns_server:debug,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 418 [ns_server:debug,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 419 [ns_server:debug,2014-08-19T16:54:11.289,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 420 [ns_server:debug,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 421 [ns_server:debug,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[rebalance:info,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 422 [ns_server:debug,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 423 [ns_server:debug,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 424 [ns_server:debug,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 425 [ns_server:debug,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [rebalance:info,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1017]Initial stream for vbucket 426 [ns_server:debug,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.290,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.291,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.291,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.291,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.291,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.291,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.291,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.291,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.291,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.291,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.291,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
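The two migrator workers above (<0.6362.1> and <0.6363.1>) interleave two kinds of entries: process_upstream:1017 logs "Initial stream for vbucket N" as each new vbucket stream is opened, and process_upstream:1031 logs "seen backfill-close message" as backfill-end markers arrive. The close entries carry no vbucket id, so the only thing recoverable from this excerpt is a running count of opens versus closes. The Erlang sketch below is illustrative only (the parsed event tuples are hypothetical, and this is not ns_server code); it tallies the two kinds of events and reports how many streams are still awaiting a close.

%% Illustrative only -- not part of ns_server. Tallies parsed log events of the
%% two kinds shown above, assuming each is represented as either
%% {initial_stream, VBucket} or the atom backfill_close.
-module(backfill_tally).
-export([tally/1, outstanding/1]).

%% Returns {OpenedCount, ClosedCount} for a list of parsed events.
tally(Events) ->
    lists:foldl(
      fun({initial_stream, _VB}, {Open, Close}) -> {Open + 1, Close};
         (backfill_close, {Open, Close})        -> {Open, Close + 1};
         (_Other, Acc)                          -> Acc
      end,
      {0, 0},
      Events).

%% Streams opened but not yet matched by a backfill-close (never negative).
outstanding(Events) ->
    {Open, Close} = tally(Events),
    max(Open - Close, 0).

For example, backfill_tally:outstanding([{initial_stream,359}, {initial_stream,360}, backfill_close]) returns 1 under these assumptions.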
[ns_server:debug,2014-08-19T16:54:11.291,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.291,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.292,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.292,ns_1@10.242.238.90:<0.6362.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.293,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.293,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.293,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.293,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.293,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.293,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.293,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.293,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.293,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.293,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.293,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.294,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.294,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.294,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.294,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.294,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.294,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.294,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.294,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.294,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.294,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:54:11.294,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:info,2014-08-19T16:54:11.295,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.88': ["tiles"] [ns_server:debug,2014-08-19T16:54:11.295,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.296,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.296,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.296,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.296,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.296,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.296,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.296,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.296,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.296,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen 
backfill-close message [ns_server:debug,2014-08-19T16:54:11.296,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.296,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.296,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.296,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.297,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message 
[ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.298,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.299,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.299,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.299,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.299,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.299,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1023. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:11.299,ns_1@10.242.238.90:<0.6363.1>:ebucketmigrator_srv:process_upstream:1031]seen backfill-close message [ns_server:debug,2014-08-19T16:54:11.299,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 736. Nacking mccouch update. [views:debug,2014-08-19T16:54:11.299,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1023. Updated state: replica (0) [views:debug,2014-08-19T16:54:11.299,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/736. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:11.299,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1023,replica,0} [ns_server:debug,2014-08-19T16:54:11.299,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1023] [ns_server:debug,2014-08-19T16:54:11.299,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",736,active,0} [ns_server:debug,2014-08-19T16:54:11.299,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746,996, 980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976,960,944, 1008,764,748,998,982,966,950,1014,754,738,988,972,956,940,1020,1004,760,744, 1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:11.300,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/751. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:11.300,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",751,active,0} [views:debug,2014-08-19T16:54:11.409,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1023. Updated state: replica (0) [views:debug,2014-08-19T16:54:11.409,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/736. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:11.409,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",736,active,0} [ns_server:debug,2014-08-19T16:54:11.409,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1023,replica,0} [ns_server:info,2014-08-19T16:54:11.456,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.91': ["tiles"] [ns_server:info,2014-08-19T16:54:11.487,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:54:11.559,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 749. Nacking mccouch update. [views:debug,2014-08-19T16:54:11.559,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/749. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:11.559,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",749,active,0} [ns_server:debug,2014-08-19T16:54:11.560,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,749,983,736,672,608,970,957,710,646,582,1021,944,761,1008,995,963,748, 716,684,652,620,588,556,982,950,767,1014,969,754,722,690,658,626,594,562, 1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007,994,962,981,949, 766,734,702,670,638,606,574,1013,968,753,1000,987,955,740,708,676,644,612, 580,548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980,948, 765,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941,758,726, 694,662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572,1011, 998,966,751,985,953,738,706,674,642,610,578,1017,972,940,757,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,1010,965,718,654,590,952,1016, 939,756,692,628,564,1003,990,977,730,666,602,964,951,704,640,576,1015,938, 755,1002,989,742,678,614,550,976] [ns_server:debug,2014-08-19T16:54:11.659,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1022. Nacking mccouch update. [views:debug,2014-08-19T16:54:11.659,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1022. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:11.660,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1022,replica,0} [ns_server:debug,2014-08-19T16:54:11.660,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1022,1023] [ns_server:debug,2014-08-19T16:54:11.676,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 734. Nacking mccouch update. [views:debug,2014-08-19T16:54:11.676,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/734. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:11.676,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",734,active,0} [ns_server:debug,2014-08-19T16:54:11.677,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976,960, 944,1008,764,748,998,982,966,950,1014,754,738,988,972,956,940,1020,1004,760, 744,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:11.677,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/749. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:11.677,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",749,active,0} [views:debug,2014-08-19T16:54:11.752,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1022. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:11.752,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1022,replica,0} [views:debug,2014-08-19T16:54:11.825,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/734. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:11.826,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",734,active,0} [ns_server:debug,2014-08-19T16:54:12.034,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 747. Nacking mccouch update. [views:debug,2014-08-19T16:54:12.034,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/747. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:12.035,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",747,active,0} [ns_server:debug,2014-08-19T16:54:12.035,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,749,983,736,672,608,970,957,710,646,582,1021,944,761,1008,995,963,748, 716,684,652,620,588,556,982,950,767,1014,969,754,722,690,658,626,594,562, 1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007,994,962,747,981, 949,766,734,702,670,638,606,574,1013,968,753,1000,987,955,740,708,676,644, 612,580,548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980, 948,765,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941,758, 726,694,662,630,598,566,1005,992,960,979,947,764,732,700,668,636,604,572, 1011,998,966,751,985,953,738,706,674,642,610,578,1017,972,940,757,1004,991, 959,744,712,680,648,616,584,552,1023,978,946,763,1010,965,718,654,590,952, 1016,939,756,692,628,564,1003,990,977,730,666,602,964,951,704,640,576,1015, 938,755,1002,989,742,678,614,550,976] [ns_server:debug,2014-08-19T16:54:12.041,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"tiles">>,<<"maps_1_8_tiles">>,<<"maps_1_8_metahash">>,<<"default">>] [ns_server:info,2014-08-19T16:54:12.045,ns_1@10.242.238.90:<0.6451.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `tiles` since at least database `tiles/100` seems to be missing. [ns_server:info,2014-08-19T16:54:12.049,ns_1@10.242.238.90:<0.6452.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `maps_1_8_tiles` since at least database `maps_1_8_tiles/100` seems to be missing. [ns_server:info,2014-08-19T16:54:12.053,ns_1@10.242.238.90:<0.6454.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `maps_1_8_metahash` since at least database `maps_1_8_metahash/100` seems to be missing. [ns_server:info,2014-08-19T16:54:12.057,ns_1@10.242.238.90:<0.6457.1>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning up indexes for bucket `default` [ns_server:debug,2014-08-19T16:54:12.076,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1020. Nacking mccouch update. 
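In this stretch the log alternates between mc_connection and capi_set_view_manager: each "Added _local/vbuuid document into vb: N. Nacking mccouch update." is followed by a signalled event of the form {set_vbucket, Bucket, VBucket, State, Rev}, and the per-bucket "Usable vbuckets" lists can be seen growing by one vbucket per event ([1023], then [1022,1023], and so on). The Erlang sketch below is a simplified, hypothetical reduction of those tuples into a per-bucket set; it is not the capi_set_view_manager implementation, which tracks considerably more state.

%% Illustrative only: fold {set_vbucket, Bucket, VB, State, _Rev} events, like
%% the ones signalled above, into a map of Bucket => ordered set of vbuckets
%% whose state is active or replica.
-module(usable_vbuckets_sketch).
-export([apply_events/1]).

apply_events(Events) ->
    lists:foldl(fun apply_event/2, #{}, Events).

%% active and replica vbuckets are kept in the set.
apply_event({set_vbucket, Bucket, VB, State, _Rev}, Acc)
  when State =:= active; State =:= replica ->
    maps:update_with(Bucket,
                     fun(Set) -> ordsets:add_element(VB, Set) end,
                     ordsets:from_list([VB]),
                     Acc);
%% any other state drops the vbucket, if it was being tracked.
apply_event({set_vbucket, Bucket, VB, _State, _Rev}, Acc) ->
    case Acc of
        #{Bucket := Set} -> Acc#{Bucket => ordsets:del_element(VB, Set)};
        _ -> Acc
    end;
apply_event(_Other, Acc) ->
    Acc.

Feeding it the three events signalled earlier, [{set_vbucket,"tiles",1023,replica,0}, {set_vbucket,"maps_1_8_tiles",736,active,0}, {set_vbucket,"maps_1_8_metahash",751,active,0}], yields #{"tiles" => [1023], "maps_1_8_tiles" => [736], "maps_1_8_metahash" => [751]}.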
[views:debug,2014-08-19T16:54:12.076,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1020. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:12.076,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1020,replica,0} [ns_server:debug,2014-08-19T16:54:12.076,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1022,1020,1023] [ns_server:info,2014-08-19T16:54:12.076,ns_1@10.242.238.90:<0.6457.1>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:54:12.081,ns_1@10.242.238.90:<0.6465.1>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 70794, disk size is 10529838 [ns_server:debug,2014-08-19T16:54:12.082,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:54:12.082,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [views:debug,2014-08-19T16:54:12.152,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/747. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:12.152,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",747,active,0} [ns_server:debug,2014-08-19T16:54:12.235,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 732. Nacking mccouch update. [views:debug,2014-08-19T16:54:12.235,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/732. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:12.235,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",732,active,0} [ns_server:debug,2014-08-19T16:54:12.235,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976,960, 944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956,940,1020,1004, 760,744,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:12.236,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1020. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:12.236,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1020,replica,0} [views:debug,2014-08-19T16:54:12.367,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/732. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:12.367,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",732,active,0} [ns_server:debug,2014-08-19T16:54:12.461,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 745. Nacking mccouch update. [views:debug,2014-08-19T16:54:12.461,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/745. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:12.461,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",745,active,0} [ns_server:debug,2014-08-19T16:54:12.462,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,749,983,736,672,608,970,957,710,646,582,1021,944,761,1008,995,963,748, 716,684,652,620,588,556,982,950,767,1014,969,754,722,690,658,626,594,562, 1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007,994,962,747,981, 949,766,734,702,670,638,606,574,1013,968,753,1000,987,955,740,708,676,644, 612,580,548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980, 948,765,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941,758, 726,694,662,630,598,566,1005,992,960,745,979,947,764,732,700,668,636,604,572, 1011,998,966,751,985,953,738,706,674,642,610,578,1017,972,940,757,1004,991, 959,744,712,680,648,616,584,552,1023,978,946,763,1010,965,718,654,590,952, 1016,939,756,692,628,564,1003,990,977,730,666,602,964,951,704,640,576,1015, 938,755,1002,989,742,678,614,550,976] [ns_server:debug,2014-08-19T16:54:12.537,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1018. Nacking mccouch update. [views:debug,2014-08-19T16:54:12.537,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1018. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:12.537,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1018,replica,0} [ns_server:debug,2014-08-19T16:54:12.537,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1022,1018,1020,1023] [views:debug,2014-08-19T16:54:12.563,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/745. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:12.563,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",745,active,0} [views:debug,2014-08-19T16:54:12.669,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1018. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:12.669,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1018,replica,0} [ns_server:debug,2014-08-19T16:54:12.686,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 730. Nacking mccouch update. 
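Interleaved with the vbucket updates above is one full compaction_daemon iteration: compaction of `tiles`, `maps_1_8_tiles`, and `maps_1_8_metahash` is skipped because at least one per-vbucket database (for example `tiles/100`) does not exist on this node yet, `default` is evaluated against the configured {database_fragmentation_threshold,{30,undefined}} and {view_fragmentation_threshold,{30,undefined}} with a reported data size of 70794 and disk size of 10529838, and the daemon then reschedules itself ("Next run will be in 30s"). The Erlang sketch below shows one way such a percentage check could look, assuming fragmentation is computed as (disk size - data size) / disk size; it is a hypothetical illustration, and the real bucket_needs_compaction logic applies further checks that are not modelled here.

%% Illustrative only: a hypothetical fragmentation check in the spirit of the
%% bucket_needs_compaction entry above. The threshold mirrors the logged
%% config tuple {Percent, MaybeAbsoluteBytes}; the absolute part is undefined
%% in the log and ignored here.
-module(frag_check_sketch).
-export([needs_compaction/3]).

needs_compaction(DataSize, DiskSize, {PercentThreshold, _MaybeAbsolute})
  when DiskSize > 0 ->
    FragPercent = (DiskSize - DataSize) * 100 / DiskSize,
    FragPercent >= PercentThreshold;
needs_compaction(_DataSize, _DiskSize, _Threshold) ->
    false.

With the figures logged above, frag_check_sketch:needs_compaction(70794, 10529838, {30, undefined}) is true (roughly 99.3% fragmentation under this definition); whether the daemon actually compacts also depends on guards this sketch leaves out.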
[views:debug,2014-08-19T16:54:12.686,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/730. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:12.686,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",730,active,0} [ns_server:debug,2014-08-19T16:54:12.686,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976, 960,944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956,940,1020, 1004,760,744,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:12.811,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/730. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:12.812,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",730,active,0} [ns_server:debug,2014-08-19T16:54:12.912,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 743. Nacking mccouch update. [views:debug,2014-08-19T16:54:12.928,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/743. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:12.929,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",743,active,0} [ns_server:debug,2014-08-19T16:54:12.929,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1016. Nacking mccouch update. [views:debug,2014-08-19T16:54:12.929,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1016. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:12.929,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1016,replica,0} [ns_server:debug,2014-08-19T16:54:12.929,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,749,983,736,672,608,970,957,710,646,582,1021,944,761,1008,995,963,748, 716,684,652,620,588,556,982,950,767,1014,969,754,722,690,658,626,594,562, 1001,988,956,1020,975,943,760,728,696,664,632,600,568,1007,994,962,747,981, 949,766,734,702,670,638,606,574,1013,968,753,1000,987,955,740,708,676,644, 612,580,548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980, 948,765,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941,758, 726,694,662,630,598,566,1005,992,960,745,979,947,764,732,700,668,636,604,572, 1011,998,966,751,985,953,738,706,674,642,610,578,1017,972,940,757,1004,991, 959,744,712,680,648,616,584,552,1023,978,946,763,1010,965,718,654,590,952, 1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,951,704,640,576, 1015,938,755,1002,989,742,678,614,550,976] [ns_server:debug,2014-08-19T16:54:12.929,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1018,1020,1023] [views:debug,2014-08-19T16:54:13.029,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/743. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:13.029,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",743,active,0} [views:debug,2014-08-19T16:54:13.071,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1016. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:13.071,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1016,replica,0} [ns_server:debug,2014-08-19T16:54:13.089,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 728. Nacking mccouch update. [views:debug,2014-08-19T16:54:13.089,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/728. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:13.089,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",728,active,0} [ns_server:debug,2014-08-19T16:54:13.089,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976, 960,944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956,940,1020, 1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:13.207,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/728. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:13.207,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",728,active,0} [ns_server:debug,2014-08-19T16:54:13.295,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 741. Nacking mccouch update. [views:debug,2014-08-19T16:54:13.295,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/741. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:13.296,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",741,active,0} [ns_server:debug,2014-08-19T16:54:13.296,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,749,983,736,672,608,970,957,710,646,582,1021,944,761,1008,995,963,748, 716,684,652,620,588,556,982,950,767,1014,969,754,722,690,658,626,594,562, 1001,988,956,741,1020,975,943,760,728,696,664,632,600,568,1007,994,962,747, 981,949,766,734,702,670,638,606,574,1013,968,753,1000,987,955,740,708,676, 644,612,580,548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554, 980,948,765,1012,999,967,752,720,688,656,624,592,560,986,954,1018,973,941, 758,726,694,662,630,598,566,1005,992,960,745,979,947,764,732,700,668,636,604, 572,1011,998,966,751,985,953,738,706,674,642,610,578,1017,972,940,757,1004, 991,959,744,712,680,648,616,584,552,1023,978,946,763,1010,965,718,654,590, 952,1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,951,704,640, 576,1015,938,755,1002,989,742,678,614,550,976] [ns_server:debug,2014-08-19T16:54:13.329,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1014. Nacking mccouch update. [views:debug,2014-08-19T16:54:13.329,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1014. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:13.329,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1014,replica,0} [ns_server:debug,2014-08-19T16:54:13.329,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1018,1014,1020,1023] [ns_server:debug,2014-08-19T16:54:13.397,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 726. Nacking mccouch update. [views:debug,2014-08-19T16:54:13.397,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/726. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:13.397,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",726,active,0} [ns_server:debug,2014-08-19T16:54:13.397,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,726,992, 976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956,940, 1020,1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:13.398,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/741. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:13.398,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",741,active,0} [ns_server:info,2014-08-19T16:54:13.399,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.90': ["tiles"] [views:debug,2014-08-19T16:54:13.431,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1014. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:13.431,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1014,replica,0} [views:debug,2014-08-19T16:54:13.526,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/726. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:13.526,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",726,active,0} [ns_server:debug,2014-08-19T16:54:13.648,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 739. Nacking mccouch update. [views:debug,2014-08-19T16:54:13.649,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/739. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:13.649,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1012. Nacking mccouch update. [views:debug,2014-08-19T16:54:13.649,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1012. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:13.649,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",739,active,0} [ns_server:debug,2014-08-19T16:54:13.649,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1012,replica,0} [ns_server:debug,2014-08-19T16:54:13.649,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1012,1018,1014,1020,1023] [ns_server:debug,2014-08-19T16:54:13.649,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,971,724,660,596,958,1022,945,762,698,634,570,1009, 996,749,983,736,672,608,970,957,710,646,582,1021,944,761,1008,995,748,684, 620,556,982,950,767,1014,969,754,722,690,658,626,594,562,1001,988,956,741, 1020,975,943,760,728,696,664,632,600,568,1007,994,962,747,981,949,766,734, 702,670,638,606,574,1013,968,753,1000,987,955,740,708,676,644,612,580,548, 1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980,948,765,1012, 999,967,752,720,688,656,624,592,560,986,954,739,1018,973,941,758,726,694,662, 630,598,566,1005,992,960,745,979,947,764,732,700,668,636,604,572,1011,998, 966,751,985,953,738,706,674,642,610,578,1017,972,940,757,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,763,1010,965,718,654,590,952,1016,939, 756,692,628,564,1003,990,743,977,730,666,602,964,951,704,640,576,1015,938, 755,1002,989,742,678,614,550,976,963,716,652,588] [views:debug,2014-08-19T16:54:13.724,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1012. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:13.724,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1012,replica,0} [views:debug,2014-08-19T16:54:13.724,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/739. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:13.724,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",739,active,0} [ns_server:debug,2014-08-19T16:54:13.782,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 724. Nacking mccouch update. [views:debug,2014-08-19T16:54:13.782,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/724. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:13.782,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",724,active,0} [ns_server:debug,2014-08-19T16:54:13.783,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006,762, 746,730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,726, 992,976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956, 940,1020,1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:13.875,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/724. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:13.875,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",724,active,0} [ns_server:debug,2014-08-19T16:54:13.982,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1010. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:13.982,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 737. Nacking mccouch update. [views:debug,2014-08-19T16:54:13.983,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1010. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:13.983,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1010,replica,0} [views:debug,2014-08-19T16:54:13.983,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/737. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:13.983,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1012,1018,1014,1020,1023,1010] [ns_server:debug,2014-08-19T16:54:13.983,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",737,active,0} [ns_server:debug,2014-08-19T16:54:13.983,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,957,710,646,582,1021,944,761,1008,995,748, 684,620,556,982,950,767,1014,969,754,722,690,658,626,594,562,1001,988,956, 741,1020,975,943,760,728,696,664,632,600,568,1007,994,962,747,981,949,766, 734,702,670,638,606,574,1013,968,753,1000,987,955,740,708,676,644,612,580, 548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980,948,765, 1012,999,967,752,720,688,656,624,592,560,986,954,739,1018,973,941,758,726, 694,662,630,598,566,1005,992,960,745,979,947,764,732,700,668,636,604,572, 1011,998,966,751,985,953,738,706,674,642,610,578,1017,972,940,757,1004,991, 959,744,712,680,648,616,584,552,1023,978,946,763,1010,965,718,654,590,952, 1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,951,704,640,576, 1015,938,755,1002,989,742,678,614,550,976,963,716,652,588] [ns_server:debug,2014-08-19T16:54:14.083,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 722. Nacking mccouch update. [views:debug,2014-08-19T16:54:14.083,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/722. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:14.083,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",722,active,0} [ns_server:debug,2014-08-19T16:54:14.083,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006,762, 746,730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,726, 992,976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,722,988,972, 956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:14.100,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/737. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:14.100,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",737,active,0} [views:debug,2014-08-19T16:54:14.101,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1010. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:14.101,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1010,replica,0} [views:debug,2014-08-19T16:54:14.184,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/722. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:14.184,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",722,active,0} [ns_server:debug,2014-08-19T16:54:14.376,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 735. Nacking mccouch update. [views:debug,2014-08-19T16:54:14.376,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/735. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:14.376,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",735,active,0} [ns_server:debug,2014-08-19T16:54:14.377,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,957,710,646,582,1021,944,761,1008,995,748, 684,620,556,982,950,767,735,1014,969,754,722,690,658,626,594,562,1001,988, 956,741,1020,975,943,760,728,696,664,632,600,568,1007,994,962,747,981,949, 766,734,702,670,638,606,574,1013,968,753,1000,987,955,740,708,676,644,612, 580,548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980,948, 765,1012,999,967,752,720,688,656,624,592,560,986,954,739,1018,973,941,758, 726,694,662,630,598,566,1005,992,960,745,979,947,764,732,700,668,636,604,572, 1011,998,966,751,985,953,738,706,674,642,610,578,1017,972,940,757,1004,991, 959,744,712,680,648,616,584,552,1023,978,946,763,1010,965,718,654,590,952, 1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,951,704,640,576, 1015,938,755,1002,989,742,678,614,550,976,963,716,652,588] [ns_server:debug,2014-08-19T16:54:14.393,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1008. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:14.393,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 720. Nacking mccouch update. [views:debug,2014-08-19T16:54:14.393,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1008. Updated state: replica (0) [views:debug,2014-08-19T16:54:14.393,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/720. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:14.393,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1008,replica,0} [ns_server:debug,2014-08-19T16:54:14.393,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1012,1018,1008,1014,1020,1023,1010] [ns_server:debug,2014-08-19T16:54:14.393,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",720,active,0} [ns_server:debug,2014-08-19T16:54:14.394,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006,762, 746,730,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002,758,742, 726,992,976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,722,988, 972,956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:14.519,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/735. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:14.519,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",735,active,0} [views:debug,2014-08-19T16:54:14.535,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1008. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:14.535,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1008,replica,0} [views:debug,2014-08-19T16:54:14.535,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/720. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:14.536,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",720,active,0} [ns_server:info,2014-08-19T16:54:14.770,ns_1@10.242.238.90:ns_doctor<0.17441.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.89': ["tiles"] [ns_server:debug,2014-08-19T16:54:14.809,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 733. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:14.809,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1006. Nacking mccouch update. [views:debug,2014-08-19T16:54:14.809,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/733. Updated state: active (0) [views:debug,2014-08-19T16:54:14.810,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1006. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:14.810,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",733,active,0} [ns_server:debug,2014-08-19T16:54:14.810,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1006,replica,0} [ns_server:debug,2014-08-19T16:54:14.810,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1006,1012,1018,1008,1014,1020,1023,1010] [ns_server:debug,2014-08-19T16:54:14.810,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,957,710,646,582,1021,944,761,1008,995,748, 684,620,556,982,950,767,735,1014,969,754,722,690,658,626,594,562,1001,988, 956,741,1020,975,943,760,728,696,664,632,600,568,1007,994,962,747,981,949, 766,734,702,670,638,606,574,1013,968,753,1000,987,955,740,708,676,644,612, 580,548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980,948, 765,733,1012,999,967,752,720,688,656,624,592,560,986,954,739,1018,973,941, 758,726,694,662,630,598,566,1005,992,960,745,979,947,764,732,700,668,636,604, 572,1011,998,966,751,985,953,738,706,674,642,610,578,1017,972,940,757,1004, 991,959,744,712,680,648,616,584,552,1023,978,946,763,1010,965,718,654,590, 952,1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,951,704,640, 576,1015,938,755,1002,989,742,678,614,550,976,963,716,652,588] [ns_server:debug,2014-08-19T16:54:14.860,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 718. Nacking mccouch update. [views:debug,2014-08-19T16:54:14.860,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/718. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:14.860,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",718,active,0} [ns_server:debug,2014-08-19T16:54:14.860,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002,758, 742,726,992,976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,722, 988,972,956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:14.961,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1006. Updated state: replica (0) [views:debug,2014-08-19T16:54:14.961,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/733. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:14.961,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1006,replica,0} [ns_server:debug,2014-08-19T16:54:14.961,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",733,active,0} [views:debug,2014-08-19T16:54:14.994,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/718. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:14.994,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",718,active,0} [ns_server:debug,2014-08-19T16:54:15.186,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1004. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:15.186,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 716. Nacking mccouch update. [views:debug,2014-08-19T16:54:15.186,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1004. Updated state: replica (0) [views:debug,2014-08-19T16:54:15.186,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/716. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:15.187,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1006,1012,1018,1008,1014,1020,1004,1023,1010] [ns_server:debug,2014-08-19T16:54:15.187,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1004,replica,0} [ns_server:debug,2014-08-19T16:54:15.187,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",716,active,0} [ns_server:debug,2014-08-19T16:54:15.187,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002,758, 742,726,992,976,960,944,1008,764,748,732,716,998,982,966,950,1014,754,738, 722,988,972,956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:15.220,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 731. Nacking mccouch update. [views:debug,2014-08-19T16:54:15.220,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/731. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:15.220,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",731,active,0} [ns_server:debug,2014-08-19T16:54:15.220,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,957,710,646,582,1021,944,761,1008,995,748, 684,620,556,982,950,767,735,1014,969,754,722,690,658,626,594,562,1001,988, 956,741,1020,975,943,760,728,696,664,632,600,568,1007,994,962,747,981,949, 766,734,702,670,638,606,574,1013,968,753,1000,987,955,740,708,676,644,612, 580,548,1019,974,942,759,1006,993,961,746,714,682,650,618,586,554,980,948, 765,733,1012,999,967,752,720,688,656,624,592,560,986,954,739,1018,973,941, 758,726,694,662,630,598,566,1005,992,960,745,979,947,764,732,700,668,636,604, 572,1011,998,966,751,985,953,738,706,674,642,610,578,1017,972,940,757,1004, 991,959,744,712,680,648,616,584,552,1023,978,946,763,731,1010,965,718,654, 590,952,1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,951,704, 640,576,1015,938,755,1002,989,742,678,614,550,976,963,716,652,588] [views:debug,2014-08-19T16:54:15.311,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1004. Updated state: replica (0) [views:debug,2014-08-19T16:54:15.311,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/716. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:15.311,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1004,replica,0} [ns_server:debug,2014-08-19T16:54:15.311,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",716,active,0} [views:debug,2014-08-19T16:54:15.343,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/731. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:15.344,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",731,active,0} [ns_server:debug,2014-08-19T16:54:15.495,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 714. Nacking mccouch update. [views:debug,2014-08-19T16:54:15.495,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/714. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:15.495,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",714,active,0} [ns_server:debug,2014-08-19T16:54:15.495,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002, 758,742,726,992,976,960,944,1008,764,748,732,716,998,982,966,950,1014,754, 738,722,988,972,956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:15.578,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1002. Nacking mccouch update. [views:debug,2014-08-19T16:54:15.579,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1002. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:15.579,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1002,replica,0} [ns_server:debug,2014-08-19T16:54:15.579,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1006,1012,1018,1002,1008,1014,1020,1004,1023,1010] [ns_server:debug,2014-08-19T16:54:15.620,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 729. Nacking mccouch update. [views:debug,2014-08-19T16:54:15.620,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/729. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:15.621,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",729,active,0} [ns_server:debug,2014-08-19T16:54:15.621,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,957,710,646,582,1021,944,761,1008,995,748, 684,620,556,982,735,969,754,722,690,658,626,594,562,1001,988,956,741,1020, 975,943,760,728,696,664,632,600,568,1007,994,962,747,981,949,766,734,702,670, 638,606,574,1013,968,753,1000,987,955,740,708,676,644,612,580,548,1019,974, 942,759,1006,993,961,746,714,682,650,618,586,554,980,948,765,733,1012,999, 967,752,720,688,656,624,592,560,986,954,739,1018,973,941,758,726,694,662,630, 598,566,1005,992,960,745,979,947,764,732,700,668,636,604,572,1011,998,966, 751,985,953,738,706,674,642,610,578,1017,972,940,757,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,763,731,1010,965,718,654,590,952,1016,939, 756,692,628,564,1003,990,743,977,730,666,602,964,951,704,640,576,1015,938, 755,1002,989,742,678,614,550,976,729,963,716,652,588,950,767,1014] [views:debug,2014-08-19T16:54:15.638,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/714. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:15.638,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",714,active,0} [views:debug,2014-08-19T16:54:15.679,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1002. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:15.680,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1002,replica,0} [views:debug,2014-08-19T16:54:15.738,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/729. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:15.738,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",729,active,0} [ns_server:debug,2014-08-19T16:54:15.878,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 712. Nacking mccouch update. [views:debug,2014-08-19T16:54:15.878,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/712. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:15.879,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",712,active,0} [ns_server:debug,2014-08-19T16:54:15.879,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002, 758,742,726,992,976,960,944,1008,764,748,732,716,998,982,966,950,1014,754, 738,722,988,972,956,940,1020,1004,760,744,728,712,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:15.928,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1000. Nacking mccouch update. [views:debug,2014-08-19T16:54:15.928,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1000. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:15.929,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1000,replica,0} [ns_server:debug,2014-08-19T16:54:15.929,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,1012,1018,1002,1008,1014,1020,1004,1023,1010] [ns_server:debug,2014-08-19T16:54:15.945,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 727. Nacking mccouch update. [views:debug,2014-08-19T16:54:15.945,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/727. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:15.945,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",727,active,0} [ns_server:debug,2014-08-19T16:54:15.946,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,957,710,646,582,1021,944,761,1008,995,748, 684,620,556,982,735,969,754,722,690,658,626,594,562,1001,988,956,741,1020, 975,943,760,728,696,664,632,600,568,1007,994,962,747,981,949,766,734,702,670, 638,606,574,1013,968,753,1000,987,955,740,708,676,644,612,580,548,1019,974, 942,759,727,1006,993,961,746,714,682,650,618,586,554,980,948,765,733,1012, 999,967,752,720,688,656,624,592,560,986,954,739,1018,973,941,758,726,694,662, 630,598,566,1005,992,960,745,979,947,764,732,700,668,636,604,572,1011,998, 966,751,985,953,738,706,674,642,610,578,1017,972,940,757,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,763,731,1010,965,718,654,590,952,1016, 939,756,692,628,564,1003,990,743,977,730,666,602,964,951,704,640,576,1015, 938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950,767,1014] [views:debug,2014-08-19T16:54:16.021,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/712. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:16.021,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",712,active,0} [views:debug,2014-08-19T16:54:16.054,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/1000. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:16.055,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",1000,replica,0} [views:debug,2014-08-19T16:54:16.088,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/727. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:16.088,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",727,active,0} [ns_server:debug,2014-08-19T16:54:16.306,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 710. Nacking mccouch update. [views:debug,2014-08-19T16:54:16.307,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/710. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:16.307,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",710,active,0} [ns_server:debug,2014-08-19T16:54:16.307,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002, 758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982,966,950,1014, 754,738,722,988,972,956,940,1020,1004,760,744,728,712,1023,994,978,962,946, 1010] [ns_server:debug,2014-08-19T16:54:16.323,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 998. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:16.323,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 725. Nacking mccouch update. [views:debug,2014-08-19T16:54:16.323,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/998. Updated state: replica (0) [views:debug,2014-08-19T16:54:16.324,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/725. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:16.324,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",998,replica,0} [ns_server:debug,2014-08-19T16:54:16.324,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,1012,1018,1002,1008,998,1014,1020,1004,1023,1010] [ns_server:debug,2014-08-19T16:54:16.324,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",725,active,0} [ns_server:debug,2014-08-19T16:54:16.324,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,957,710,646,582,1021,944,761,1008,995,748, 684,620,556,982,735,969,754,722,690,658,626,594,562,1001,988,956,741,1020, 975,943,760,728,696,664,632,600,568,1007,994,962,747,981,949,766,734,702,670, 638,606,574,1013,968,753,1000,987,955,740,708,676,644,612,580,548,1019,974, 942,759,727,1006,993,961,746,714,682,650,618,586,554,980,948,765,733,1012, 999,967,752,720,688,656,624,592,560,986,954,739,1018,973,941,758,726,694,662, 630,598,566,1005,992,960,745,979,947,764,732,700,668,636,604,572,1011,998, 966,751,985,953,738,706,674,642,610,578,1017,972,940,757,725,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,731,1010,965,718,654,590,952, 1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,951,704,640,576, 1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950,767,1014] [views:debug,2014-08-19T16:54:16.441,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/710. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:16.441,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",710,active,0} [views:debug,2014-08-19T16:54:16.474,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/725. Updated state: active (0) [views:debug,2014-08-19T16:54:16.474,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/998. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:16.474,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",725,active,0} [ns_server:debug,2014-08-19T16:54:16.474,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",998,replica,0} [ns_server:debug,2014-08-19T16:54:16.732,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 996. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:16.732,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 708. Nacking mccouch update. [views:debug,2014-08-19T16:54:16.732,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/996. Updated state: replica (0) [views:debug,2014-08-19T16:54:16.732,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/708. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:16.733,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",996,replica,0} [ns_server:debug,2014-08-19T16:54:16.733,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,996,1012,1018,1002,1008,998,1014,1020,1004,1023,1010] [ns_server:debug,2014-08-19T16:54:16.733,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",708,active,0} [ns_server:debug,2014-08-19T16:54:16.733,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,708,990,974,958,942,1022, 1006,762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018, 1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982,966,950, 1014,754,738,722,988,972,956,940,1020,1004,760,744,728,712,1023,994,978,962, 946,1010] [ns_server:debug,2014-08-19T16:54:16.749,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 723. Nacking mccouch update. [views:debug,2014-08-19T16:54:16.749,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/723. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:16.749,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",723,active,0} [ns_server:debug,2014-08-19T16:54:16.750,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,1008,995, 748,684,620,556,982,735,969,754,722,690,658,626,594,562,1001,988,956,741, 1020,975,943,760,728,696,664,632,600,568,1007,994,962,747,981,949,766,734, 702,670,638,606,574,1013,968,753,1000,987,955,740,708,676,644,612,580,548, 1019,974,942,759,727,1006,993,961,746,714,682,650,618,586,554,980,948,765, 733,1012,999,967,752,720,688,656,624,592,560,986,954,739,1018,973,941,758, 726,694,662,630,598,566,1005,992,960,745,979,947,764,732,700,668,636,604,572, 1011,998,966,751,985,953,738,706,674,642,610,578,1017,972,940,757,725,1004, 991,959,744,712,680,648,616,584,552,1023,978,946,763,731,1010,965,718,654, 590,952,1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,951,704, 640,576,1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950, 767,1014] [views:debug,2014-08-19T16:54:16.875,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/996. Updated state: replica (0) [views:debug,2014-08-19T16:54:16.875,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/708. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:16.875,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",996,replica,0} [ns_server:debug,2014-08-19T16:54:16.875,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",708,active,0} [views:debug,2014-08-19T16:54:16.904,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/723. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:16.904,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",723,active,0} [ns_server:debug,2014-08-19T16:54:17.109,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 994. Nacking mccouch update. [views:debug,2014-08-19T16:54:17.109,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/994. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:17.109,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,996,1012,1018,1002,1008,998,1014,1020,1004,1023,994,1010] [ns_server:debug,2014-08-19T16:54:17.110,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",994,replica,0} [ns_server:debug,2014-08-19T16:54:17.151,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 706. Nacking mccouch update. 
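
(Note: the block below is not part of the log. The entries above all repeat one pattern: a `views:debug` line of the form "Got set_vbucket event for <bucket>/<vbucket>. Updated state: <state> (0)" followed by the matching mc_couch_event signal. A minimal sketch, assuming Python and that the log has been saved to a plain-text file whose name is hypothetical here, of how one might tally these events per bucket and state:

    import re
    from collections import Counter

    # Hypothetical file name; point this at the saved ns_server debug log.
    LOG_PATH = "ns_server.debug.log"

    # Matches entries such as:
    #   Got set_vbucket event for maps_1_8_metahash/741. Updated state: active (0)
    # \s* tolerates the line wrap that can fall between the event and its state.
    EVENT_RE = re.compile(
        r"Got set_vbucket event for (?P<bucket>\w+)/(?P<vbucket>\d+)\.\s*"
        r"Updated state: (?P<state>\w+)"
    )

    def tally_set_vbucket_events(path=LOG_PATH):
        """Count set_vbucket events per (bucket, state) across the whole log."""
        with open(path, "r", errors="replace") as f:
            text = f.read()
        return Counter(
            (m.group("bucket"), m.group("state")) for m in EVENT_RE.finditer(text)
        )

    if __name__ == "__main__":
        for (bucket, state), n in sorted(tally_set_vbucket_events().items()):
            print(f"{bucket:20s} {state:8s} {n}")

For the excerpt above this would show, for example, "maps_1_8_tiles" and "maps_1_8_metahash" accumulating active vbuckets while "tiles" accumulates replica vbuckets.)
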
[views:debug,2014-08-19T16:54:17.151,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/706. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:17.151,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",706,active,0} [ns_server:debug,2014-08-19T16:54:17.151,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,708,990,974,958,942,1022, 1006,762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018, 1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982,966,950, 1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712,1023,994,978, 962,946,1010] [ns_server:debug,2014-08-19T16:54:17.185,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 721. Nacking mccouch update. [views:debug,2014-08-19T16:54:17.185,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/721. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:17.185,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",721,active,0} [ns_server:debug,2014-08-19T16:54:17.185,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,1008,995, 748,684,620,556,982,735,969,754,722,690,658,626,594,562,1001,988,956,741, 1020,975,943,760,728,696,664,632,600,568,1007,994,962,747,981,949,766,734, 702,670,638,606,574,1013,968,753,721,1000,987,955,740,708,676,644,612,580, 548,1019,974,942,759,727,1006,993,961,746,714,682,650,618,586,554,980,948, 765,733,1012,999,967,752,720,688,656,624,592,560,986,954,739,1018,973,941, 758,726,694,662,630,598,566,1005,992,960,745,979,947,764,732,700,668,636,604, 572,1011,998,966,751,985,953,738,706,674,642,610,578,1017,972,940,757,725, 1004,991,959,744,712,680,648,616,584,552,1023,978,946,763,731,1010,965,718, 654,590,952,1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,951, 704,640,576,1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588, 950,767,1014] [views:debug,2014-08-19T16:54:17.252,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/994. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:17.252,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",994,replica,0} [views:debug,2014-08-19T16:54:17.269,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/706. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:17.269,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",706,active,0} [views:debug,2014-08-19T16:54:17.285,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/721. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:17.285,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",721,active,0} [ns_server:debug,2014-08-19T16:54:17.442,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 992. Nacking mccouch update. [views:debug,2014-08-19T16:54:17.442,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/992. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:17.442,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",992,replica,0} [ns_server:debug,2014-08-19T16:54:17.443,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,996,1012,1018,1002,992,1008,998,1014,1020,1004,1023,994, 1010] [ns_server:debug,2014-08-19T16:54:17.517,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 719. Nacking mccouch update. [views:debug,2014-08-19T16:54:17.518,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/719. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:17.518,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",719,active,0} [ns_server:debug,2014-08-19T16:54:17.518,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,1008,995, 748,684,620,556,982,735,969,722,658,594,988,956,741,1020,975,943,760,728,696, 664,632,600,568,1007,994,962,747,981,949,766,734,702,670,638,606,574,1013, 968,753,721,1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727, 1006,993,961,746,714,682,650,618,586,554,980,948,765,733,1012,999,967,752, 720,688,656,624,592,560,986,954,739,1018,973,941,758,726,694,662,630,598,566, 1005,992,960,745,979,947,764,732,700,668,636,604,572,1011,998,966,751,719, 985,953,738,706,674,642,610,578,1017,972,940,757,725,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,763,731,1010,965,718,654,590,952,1016,939, 756,692,628,564,1003,990,743,977,730,666,602,964,951,704,640,576,1015,938, 755,1002,989,742,678,614,550,976,729,963,716,652,588,950,767,1014,754,690, 626,562,1001] [ns_server:debug,2014-08-19T16:54:17.543,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 704. Nacking mccouch update. [views:debug,2014-08-19T16:54:17.543,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/704. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:17.543,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",704,active,0} [ns_server:debug,2014-08-19T16:54:17.543,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,708,990,974,958,942,1022, 1006,762,746,730,714,996,980,964,948,1012,752,736,720,704,986,970,954,938, 1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982,966, 950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712,1023,994, 978,962,946,1010] [views:debug,2014-08-19T16:54:17.585,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/992. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:17.585,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",992,replica,0} [views:debug,2014-08-19T16:54:17.619,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/719. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:17.619,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",719,active,0} [views:debug,2014-08-19T16:54:17.652,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/704. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:17.652,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",704,active,0} [ns_server:debug,2014-08-19T16:54:17.786,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 990. Nacking mccouch update. [views:debug,2014-08-19T16:54:17.786,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/990. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:17.786,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",990,replica,0} [ns_server:debug,2014-08-19T16:54:17.786,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,990,1022,1006,996,1012,1018,1002,992,1008,998,1014,1020,1004,1023, 994,1010] [ns_server:debug,2014-08-19T16:54:17.861,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 717. Nacking mccouch update. [views:debug,2014-08-19T16:54:17.862,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/717. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:17.862,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",717,active,0} [views:debug,2014-08-19T16:54:17.862,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/990. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:17.862,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",990,replica,0} [ns_server:debug,2014-08-19T16:54:17.862,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,1008,995, 748,684,620,556,982,735,969,722,658,594,988,956,741,1020,975,943,760,728,696, 664,632,600,568,1007,994,962,747,981,949,766,734,702,670,638,606,574,1013, 968,753,721,1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727, 1006,993,961,746,714,682,650,618,586,554,980,948,765,733,1012,999,967,752, 720,688,656,624,592,560,986,954,739,1018,973,941,758,726,694,662,630,598,566, 1005,992,960,745,979,947,764,732,700,668,636,604,572,1011,998,966,751,719, 985,953,738,706,674,642,610,578,1017,972,940,757,725,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,763,731,1010,965,718,654,590,952,1016,939, 756,692,628,564,1003,990,743,977,730,666,602,964,717,951,704,640,576,1015, 938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950,767,1014,754, 690,626,562,1001] [ns_server:debug,2014-08-19T16:54:17.902,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 702. Nacking mccouch update. [views:debug,2014-08-19T16:54:17.902,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/702. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:17.902,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",702,active,0} [ns_server:debug,2014-08-19T16:54:17.902,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,702,984,968,952,1016,1000,756,740,724,708,990,974,958,942, 1022,1006,762,746,730,714,996,980,964,948,1012,752,736,720,704,986,970,954, 938,1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982, 966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712,1023, 994,978,962,946,1010] [ns_server:info,2014-08-19T16:54:17.909,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.89' [views:debug,2014-08-19T16:54:17.987,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/702. Updated state: active (0) [views:debug,2014-08-19T16:54:17.987,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/717. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:17.987,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",702,active,0} [ns_server:debug,2014-08-19T16:54:17.987,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",717,active,0} [ns_server:debug,2014-08-19T16:54:18.154,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 988. Nacking mccouch update. 
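
(Note: the block below is not part of the log. Each capi_set_view_manager "Usable vbuckets:" entry prints the full current set for its bucket, so consecutive snapshots differ by the vbuckets that just became usable. A minimal sketch, under the same assumptions as above (Python, log saved to a plain-text file with a hypothetical name), of diffing successive snapshots per bucket:

    import re

    # Hypothetical file name; point this at the saved ns_server debug log.
    LOG_PATH = "ns_server.debug.log"

    # Matches entries such as:
    #   capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]
    #   Usable vbuckets: [766,750,734, ...]
    # [\d,\s]+ tolerates the commas and line wraps inside the list.
    USABLE_RE = re.compile(
        r"capi_set_view_manager-(?P<bucket>\w+)<[^>]*>:"
        r"capi_set_view_manager:handle_info:\d+\]"
        r"Usable vbuckets:\s*\[(?P<ids>[\d,\s]+)\]"
    )

    def track_usable_vbuckets(path=LOG_PATH):
        """Print the vbucket IDs added between successive snapshots per bucket."""
        with open(path, "r", errors="replace") as f:
            text = f.read()
        previous = {}
        for m in USABLE_RE.finditer(text):
            bucket = m.group("bucket")
            ids = {int(x) for x in re.split(r"[,\s]+", m.group("ids").strip()) if x}
            added = sorted(ids - previous.get(bucket, set()))
            if added:
                print(f"{bucket}: +{added} (now {len(ids)} usable)")
            previous[bucket] = ids

    if __name__ == "__main__":
        track_usable_vbuckets()

Applied to the excerpt above, each "Usable vbuckets" snapshot for "tiles" should show one newly added replica vbucket (1010, 1008, 1006, ...), mirroring the set_vbucket events that precede it.)
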
[views:debug,2014-08-19T16:54:18.154,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/988. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:18.154,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",988,replica,0} [ns_server:debug,2014-08-19T16:54:18.154,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,990,1022,1006,996,1012,1018,1002,992,1008,998,1014,988,1020,1004, 1023,994,1010] [views:debug,2014-08-19T16:54:18.271,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/988. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:18.271,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",988,replica,0} [ns_server:debug,2014-08-19T16:54:18.380,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 700. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:18.380,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 715. Nacking mccouch update. [views:debug,2014-08-19T16:54:18.380,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/700. Updated state: active (0) [views:debug,2014-08-19T16:54:18.380,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/715. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:18.380,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",700,active,0} [ns_server:debug,2014-08-19T16:54:18.380,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",715,active,0} [ns_server:debug,2014-08-19T16:54:18.380,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,702,984,968,952,1016,1000,756,740,724,708,990,974,958,942, 1022,1006,762,746,730,714,996,980,964,948,1012,752,736,720,704,986,970,954, 938,1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,700,998, 982,966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712, 1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:18.380,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,1008,995, 748,684,620,556,982,735,969,722,658,594,988,956,741,1020,975,943,760,728,696, 664,632,600,568,1007,994,962,747,715,981,949,766,734,702,670,638,606,574, 1013,968,753,721,1000,987,955,740,708,676,644,612,580,548,1019,974,942,759, 727,1006,993,961,746,714,682,650,618,586,554,980,948,765,733,1012,999,967, 752,720,688,656,624,592,560,986,954,739,1018,973,941,758,726,694,662,630,598, 566,1005,992,960,745,979,947,764,732,700,668,636,604,572,1011,998,966,751, 719,985,953,738,706,674,642,610,578,1017,972,940,757,725,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,763,731,1010,965,718,654,590,952,1016, 
939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951,704,640,576, 1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950,767,1014, 754,690,626,562,1001] [views:debug,2014-08-19T16:54:18.480,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/700. Updated state: active (0) [views:debug,2014-08-19T16:54:18.480,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/715. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:18.480,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",700,active,0} [ns_server:debug,2014-08-19T16:54:18.480,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",715,active,0} [ns_server:debug,2014-08-19T16:54:18.589,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 986. Nacking mccouch update. [views:debug,2014-08-19T16:54:18.589,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/986. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:18.589,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",986,replica,0} [ns_server:debug,2014-08-19T16:54:18.589,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,990,1022,1006,996,1012,986,1018,1002,992,1008,998,1014,988,1020, 1004,1023,994,1010] [views:debug,2014-08-19T16:54:18.662,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/986. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:18.662,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",986,replica,0} [ns_server:debug,2014-08-19T16:54:18.787,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 698. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:18.788,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 713. Nacking mccouch update. [views:debug,2014-08-19T16:54:18.788,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/698. Updated state: active (0) [views:debug,2014-08-19T16:54:18.788,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/713. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:18.788,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",698,active,0} [ns_server:debug,2014-08-19T16:54:18.788,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",713,active,0} [ns_server:debug,2014-08-19T16:54:18.788,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,702,984,968,952,1016,1000,756,740,724,708,990,974,958,942, 1022,1006,762,746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970, 954,938,1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,700, 998,982,966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728, 712,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:18.789,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,1008,995, 748,684,620,556,982,735,969,722,658,594,988,956,741,1020,975,943,760,728,696, 664,632,600,568,1007,994,962,747,715,981,949,766,734,702,670,638,606,574, 1013,968,753,721,1000,987,955,740,708,676,644,612,580,548,1019,974,942,759, 727,1006,993,961,746,714,682,650,618,586,554,980,948,765,733,1012,999,967, 752,720,688,656,624,592,560,986,954,739,1018,973,941,758,726,694,662,630,598, 566,1005,992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966, 751,719,985,953,738,706,674,642,610,578,1017,972,940,757,725,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,731,1010,965,718,654,590,952, 1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951,704,640, 576,1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950,767, 1014,754,690,626,562,1001] [ns_server:debug,2014-08-19T16:54:18.921,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 984. Nacking mccouch update. [views:debug,2014-08-19T16:54:18.921,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/984. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:18.922,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",984,replica,0} [ns_server:debug,2014-08-19T16:54:18.922,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,1012,986,1018,1002,992,1008,998,1014,988, 1020,1004,1023,994,1010] [views:debug,2014-08-19T16:54:18.939,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/713. Updated state: active (0) [views:debug,2014-08-19T16:54:18.939,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/698. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:18.939,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",713,active,0} [ns_server:debug,2014-08-19T16:54:18.939,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",698,active,0} [views:debug,2014-08-19T16:54:19.039,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/984. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:19.039,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",984,replica,0} [ns_server:debug,2014-08-19T16:54:19.191,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 711. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:19.191,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 696. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:19.191,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 982. Nacking mccouch update. [views:debug,2014-08-19T16:54:19.191,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/711. Updated state: active (0) [views:debug,2014-08-19T16:54:19.191,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/696. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:19.191,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",711,active,0} [views:debug,2014-08-19T16:54:19.191,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/982. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:19.191,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",696,active,0} [ns_server:debug,2014-08-19T16:54:19.191,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",982,replica,0} [ns_server:debug,2014-08-19T16:54:19.192,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,1012,986,1018,1002,992,1008,998,982,1014,988, 1020,1004,1023,994,1010] [ns_server:debug,2014-08-19T16:54:19.192,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,702,984,968,952,1016,1000,756,740,724,708,990,974,958,942, 1022,1006,762,746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970, 954,938,1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,700, 998,982,966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728, 712,696,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:19.192,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,1008,995, 748,684,620,556,982,735,969,722,658,594,988,956,741,1020,975,943,760,728,696, 664,632,600,568,1007,994,962,747,715,981,949,766,734,702,670,638,606,574, 1013,968,753,721,1000,987,955,740,708,676,644,612,580,548,1019,974,942,759, 727,1006,993,961,746,714,682,650,618,586,554,980,948,765,733,1012,999,967, 752,720,688,656,624,592,560,986,954,739,1018,973,941,758,726,694,662,630,598, 566,1005,992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966, 751,719,985,953,738,706,674,642,610,578,1017,972,940,757,725,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,731,1010,965,718,654,590,952, 1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951,704,640, 576,1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950,767, 1014,754,690,626,562,1001] [views:debug,2014-08-19T16:54:19.292,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/696. Updated state: active (0) [views:debug,2014-08-19T16:54:19.292,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/982. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:19.292,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",696,active,0} [views:debug,2014-08-19T16:54:19.292,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/711. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:19.292,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",982,replica,0} [ns_server:debug,2014-08-19T16:54:19.292,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",711,active,0} [ns_server:debug,2014-08-19T16:54:19.591,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 980. 
Nacking mccouch update. [views:debug,2014-08-19T16:54:19.592,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/980. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:19.592,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",980,replica,0} [ns_server:debug,2014-08-19T16:54:19.592,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,980,1012,986,1018,1002,992,1008,998,982,1014, 988,1020,1004,1023,994,1010] [ns_server:debug,2014-08-19T16:54:19.625,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 694. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:19.625,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 709. Nacking mccouch update. [views:debug,2014-08-19T16:54:19.625,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/694. Updated state: active (0) [views:debug,2014-08-19T16:54:19.625,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/709. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:19.625,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",694,active,0} [ns_server:debug,2014-08-19T16:54:19.625,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",709,active,0} [ns_server:debug,2014-08-19T16:54:19.625,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,968,952,1016,1000,756,740,724,708,990,974,958,942,1022,1006,762, 746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970,954,938,1018, 1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,998,982, 966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712,696, 1023,994,978,962,946,1010,766,734,702] [ns_server:debug,2014-08-19T16:54:19.626,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,1008,995, 748,684,620,556,982,735,969,722,658,594,956,709,1020,975,943,760,728,696,664, 632,600,568,1007,994,962,747,715,981,949,766,734,702,670,638,606,574,1013, 968,753,721,1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727, 1006,993,961,746,714,682,650,618,586,554,980,948,765,733,1012,999,967,752, 720,688,656,624,592,560,986,954,739,1018,973,941,758,726,694,662,630,598,566, 1005,992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966,751, 719,985,953,738,706,674,642,610,578,1017,972,940,757,725,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,763,731,1010,965,718,654,590,952,1016, 939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951,704,640,576, 1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950,767,1014, 754,690,626,562,1001,988,741] [views:debug,2014-08-19T16:54:19.726,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket 
event for tiles/980. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:19.726,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",980,replica,0} [views:debug,2014-08-19T16:54:19.755,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/694. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:19.755,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",694,active,0} [views:debug,2014-08-19T16:54:19.755,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/709. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:19.755,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",709,active,0} [ns_server:debug,2014-08-19T16:54:20.001,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 978. Nacking mccouch update. [views:debug,2014-08-19T16:54:20.001,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/978. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:20.002,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",978,replica,0} [ns_server:debug,2014-08-19T16:54:20.002,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,980,1012,986,1018,1002,992,1008,998,982,1014, 988,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:54:20.044,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 692. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:20.044,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 707. Nacking mccouch update. [views:debug,2014-08-19T16:54:20.044,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/692. Updated state: active (0) [views:debug,2014-08-19T16:54:20.044,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/707. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:20.044,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",692,active,0} [ns_server:debug,2014-08-19T16:54:20.044,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",707,active,0} [ns_server:debug,2014-08-19T16:54:20.044,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,968,952,1016,1000,756,740,724,708,692,990,974,958,942,1022,1006, 762,746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,998, 982,966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712, 696,1023,994,978,962,946,1010,766,734,702] [ns_server:debug,2014-08-19T16:54:20.045,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,1008,995, 748,684,620,556,982,735,969,722,658,594,956,709,1020,975,943,760,728,696,664, 632,600,568,1007,994,962,747,715,981,949,766,734,702,670,638,606,574,1013, 968,753,721,1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727, 1006,993,961,746,714,682,650,618,586,554,980,948,765,733,1012,999,967,752, 720,688,656,624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630,598, 566,1005,992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966, 751,719,985,953,738,706,674,642,610,578,1017,972,940,757,725,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,731,1010,965,718,654,590,952, 1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951,704,640, 576,1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950,767, 1014,754,690,626,562,1001,988,741] [views:debug,2014-08-19T16:54:20.134,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/978. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:20.134,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",978,replica,0} [views:debug,2014-08-19T16:54:20.150,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/692. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:20.150,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",692,active,0} [views:debug,2014-08-19T16:54:20.150,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/707. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:20.151,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",707,active,0} [ns_server:debug,2014-08-19T16:54:20.326,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 976. Nacking mccouch update. [views:debug,2014-08-19T16:54:20.326,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/976. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:20.327,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",976,replica,0} [ns_server:debug,2014-08-19T16:54:20.327,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,980,1012,986,1018,1002,992,976,1008,998,982, 1014,988,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:54:20.368,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 690. Nacking mccouch update. [views:debug,2014-08-19T16:54:20.368,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/690. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:20.368,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",690,active,0} [ns_server:debug,2014-08-19T16:54:20.368,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,968,952,1016,1000,756,740,724,708,692,990,974,958,942,1022,1006, 762,746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,998, 982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744,728, 712,696,1023,994,978,962,946,1010,766,734,702] [ns_server:debug,2014-08-19T16:54:20.385,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 705. Nacking mccouch update. [views:debug,2014-08-19T16:54:20.385,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/705. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:20.385,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",705,active,0} [ns_server:debug,2014-08-19T16:54:20.386,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,1008,995, 748,684,620,556,982,735,969,722,658,594,956,709,1020,975,943,760,728,696,664, 632,600,568,1007,994,962,747,715,981,949,766,734,702,670,638,606,574,1013, 968,753,721,1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727, 1006,993,961,746,714,682,650,618,586,554,980,948,765,733,1012,999,967,752, 720,688,656,624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630,598, 566,1005,992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966, 751,719,985,953,738,706,674,642,610,578,1017,972,940,757,725,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,731,1010,965,718,654,590,952, 705,1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951,704, 640,576,1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950, 767,1014,754,690,626,562,1001,988,741] [views:debug,2014-08-19T16:54:20.419,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/976. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:20.419,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",976,replica,0} [views:debug,2014-08-19T16:54:20.461,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/690. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:20.461,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",690,active,0} [views:debug,2014-08-19T16:54:20.494,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/705. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:20.495,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",705,active,0} [ns_server:debug,2014-08-19T16:54:20.652,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 974. Nacking mccouch update. [views:debug,2014-08-19T16:54:20.652,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/974. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:20.652,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",974,replica,0} [ns_server:debug,2014-08-19T16:54:20.653,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,974,1022,1006,996,980,1012,986,1018,1002,992,976,1008,998, 982,1014,988,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:54:20.719,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 688. Nacking mccouch update. [views:debug,2014-08-19T16:54:20.719,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/688. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:20.719,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",688,active,0} [ns_server:debug,2014-08-19T16:54:20.720,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,968,952,1016,1000,756,740,724,708,692,990,974,958,942,1022,1006, 762,746,730,714,698,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,998, 982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744,728, 712,696,1023,994,978,962,946,1010,766,734,702] [ns_server:debug,2014-08-19T16:54:20.753,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 703. Nacking mccouch update. [views:debug,2014-08-19T16:54:20.753,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/703. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:20.753,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",703,active,0} [ns_server:debug,2014-08-19T16:54:20.753,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,1008,995, 748,684,620,556,982,735,969,722,658,594,956,709,1020,975,943,760,728,696,664, 632,600,568,1007,994,962,747,715,981,949,766,734,702,670,638,606,574,1013, 968,753,721,1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727, 1006,993,961,746,714,682,650,618,586,554,980,948,765,733,1012,999,967,752, 720,688,656,624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630,598, 566,1005,992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966, 751,719,985,953,738,706,674,642,610,578,1017,972,940,757,725,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,731,1010,965,718,654,590,952, 705,1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951,704, 640,576,1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950, 767,703,1014,754,690,626,562,1001,988,741] [views:debug,2014-08-19T16:54:20.786,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/974. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:20.787,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",974,replica,0} [views:debug,2014-08-19T16:54:20.837,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/688. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:20.837,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",688,active,0} [views:debug,2014-08-19T16:54:20.862,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/703. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:20.862,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",703,active,0} [ns_server:debug,2014-08-19T16:54:21.038,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 972. Nacking mccouch update. [views:debug,2014-08-19T16:54:21.038,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/972. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:21.038,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",972,replica,0} [ns_server:debug,2014-08-19T16:54:21.038,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,974,1022,1006,996,980,1012,986,1018,1002,992,976,1008,998, 982,1014,988,972,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:54:21.130,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 686. Nacking mccouch update. 
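The records above repeat one pattern: mc_connection:do_notify_vbucket_update writes a _local/vbuuid document into a vbucket and nacks the mccouch update, then signals a {set_vbucket, Bucket, VBucket, State, 0} event; capi_set_view_manager:handle_mc_couch_event logs that event, and capi_set_view_manager:handle_info then reports an updated "Usable vbuckets" list. The sketch below is only an illustration of that bookkeeping, not the ns_server code: the module name vbucket_tracker_sketch is invented, treating both active and replica as "usable" is an assumption inferred from the lists growing for both states in this log, and the meaning of the trailing 0 in the event tuple is not assumed (it is ignored).

%% Illustrative sketch only; not the actual capi_set_view_manager implementation.
-module(vbucket_tracker_sketch).
-export([new/0, handle_set_vbucket/2, usable_vbuckets/1]).

%% Start with an empty set of usable vbuckets for one bucket.
new() ->
    sets:new().

%% Fold one set_vbucket event (as signaled by mc_connection above) into the set.
%% Assumption: active and replica states both count as usable; any other state
%% removes the vbucket. The last tuple element is the trailing 0 seen in the
%% log records; it is matched but ignored here.
handle_set_vbucket({set_vbucket, _Bucket, VBucket, State, _Extra}, Set)
  when State =:= active; State =:= replica ->
    sets:add_element(VBucket, Set);
handle_set_vbucket({set_vbucket, _Bucket, VBucket, _State, _Extra}, Set) ->
    sets:del_element(VBucket, Set).

%% Sorted list, comparable to the "Usable vbuckets: [...]" lines
%% (the real log prints them in a different, unsorted order).
usable_vbuckets(Set) ->
    lists:sort(sets:to_list(Set)).

Applying the "tiles" events logged above one at a time (986, 984, 982, 980, 978, 976, 974, 972 moving to replica so far) to the previously reported set yields, up to ordering, the next "Usable vbuckets" list printed by capi_set_view_manager-tiles after each event.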
[ns_server:debug,2014-08-19T16:54:21.130,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 701. Nacking mccouch update. [views:debug,2014-08-19T16:54:21.130,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/686. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:21.130,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",686,active,0} [views:debug,2014-08-19T16:54:21.130,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/701. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:21.130,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",701,active,0} [ns_server:debug,2014-08-19T16:54:21.130,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,968,952,1016,1000,756,740,724,708,692,990,974,958,942,1022, 1006,762,746,730,714,698,996,980,964,948,1012,752,736,720,704,688,986,970, 954,938,1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716, 700,998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760, 744,728,712,696,1023,994,978,962,946,1010,766,734,702] [views:debug,2014-08-19T16:54:21.131,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/972. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:21.131,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",972,replica,0} [ns_server:debug,2014-08-19T16:54:21.131,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,1008,995, 748,684,620,556,982,735,969,722,658,594,956,709,1020,975,943,760,728,696,664, 632,600,568,1007,994,962,747,715,981,949,766,734,702,670,638,606,574,1013, 968,753,721,1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727, 1006,993,961,746,714,682,650,618,586,554,980,948,765,733,701,1012,999,967, 752,720,688,656,624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630, 598,566,1005,992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998, 966,751,719,985,953,738,706,674,642,610,578,1017,972,940,757,725,1004,991, 959,744,712,680,648,616,584,552,1023,978,946,763,731,1010,965,718,654,590, 952,705,1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951, 704,640,576,1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588, 950,767,703,1014,754,690,626,562,1001,988,741] [views:debug,2014-08-19T16:54:21.256,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/686. Updated state: active (0) [views:debug,2014-08-19T16:54:21.256,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/701. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:21.256,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",686,active,0} [ns_server:debug,2014-08-19T16:54:21.256,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",701,active,0} [ns_server:debug,2014-08-19T16:54:21.387,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 970. Nacking mccouch update. [views:debug,2014-08-19T16:54:21.387,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/970. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:21.387,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",970,replica,0} [ns_server:debug,2014-08-19T16:54:21.388,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,974,1022,1006,996,980,1012,986,970,1018,1002,992,976,1008, 998,982,1014,988,972,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:54:21.438,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/970. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:21.438,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",970,replica,0} [ns_server:debug,2014-08-19T16:54:21.521,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 699. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:21.521,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 684. Nacking mccouch update. [views:debug,2014-08-19T16:54:21.522,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/699. Updated state: active (0) [views:debug,2014-08-19T16:54:21.522,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/684. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:21.522,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",699,active,0} [ns_server:debug,2014-08-19T16:54:21.522,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",684,active,0} [ns_server:debug,2014-08-19T16:54:21.522,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,990,974,958,942,1022,1006,762, 746,730,714,698,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,684, 998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744, 728,712,696,1023,994,978,962,946,1010,766,734,702,968,1000] [ns_server:debug,2014-08-19T16:54:21.522,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,1008,995, 748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760,696,632,568, 1007,994,962,747,715,981,949,766,734,702,670,638,606,574,1013,968,753,721, 1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727,1006,993,961, 746,714,682,650,618,586,554,980,948,765,733,701,1012,999,967,752,720,688,656, 624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630,598,566,1005, 992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,985, 953,738,706,674,642,610,578,1017,972,940,757,725,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,731,699,1010,965,718,654,590,952,705,1016, 939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951,704,640,576, 1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950,767,703, 1014,754,690,626,562,1001,988,741,975,728,664,600] [views:debug,2014-08-19T16:54:21.581,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/699. Updated state: active (0) [views:debug,2014-08-19T16:54:21.581,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/684. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:21.581,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",699,active,0} [ns_server:debug,2014-08-19T16:54:21.581,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",684,active,0} [ns_server:debug,2014-08-19T16:54:21.673,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 968. Nacking mccouch update. [views:debug,2014-08-19T16:54:21.673,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/968. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:21.673,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",968,replica,0} [ns_server:debug,2014-08-19T16:54:21.673,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,1012,986,970,1018,1002,992,976, 1008,998,982,1014,988,972,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:54:21.724,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/968. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:21.724,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",968,replica,0} [ns_server:debug,2014-08-19T16:54:21.848,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 682. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:21.848,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 697. Nacking mccouch update. [views:debug,2014-08-19T16:54:21.848,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/682. Updated state: active (0) [views:debug,2014-08-19T16:54:21.848,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/697. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:21.848,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",682,active,0} [ns_server:debug,2014-08-19T16:54:21.848,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",697,active,0} [ns_server:debug,2014-08-19T16:54:21.848,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,990,974,958,942,1022,1006,762, 746,730,714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,684, 998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744, 728,712,696,1023,994,978,962,946,1010,766,734,702,968,1000] [ns_server:debug,2014-08-19T16:54:21.849,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,697,1008, 995,748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760,696,632,568, 1007,994,962,747,715,981,949,766,734,702,670,638,606,574,1013,968,753,721, 1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727,1006,993,961, 746,714,682,650,618,586,554,980,948,765,733,701,1012,999,967,752,720,688,656, 624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630,598,566,1005, 992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,985, 953,738,706,674,642,610,578,1017,972,940,757,725,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,731,699,1010,965,718,654,590,952,705,1016, 939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951,704,640,576, 
1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950,767,703, 1014,754,690,626,562,1001,988,741,975,728,664,600] [ns_server:debug,2014-08-19T16:54:21.948,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 966. Nacking mccouch update. [views:debug,2014-08-19T16:54:21.948,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/966. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:21.948,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,1012,986,970,1018,1002,992,976, 1008,998,982,966,1014,988,972,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:54:21.948,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",966,replica,0} [views:debug,2014-08-19T16:54:21.965,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/682. Updated state: active (0) [views:debug,2014-08-19T16:54:21.966,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/697. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:21.966,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",682,active,0} [ns_server:debug,2014-08-19T16:54:21.966,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",697,active,0} [views:debug,2014-08-19T16:54:22.074,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/966. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:22.075,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",966,replica,0} [ns_server:debug,2014-08-19T16:54:22.176,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 695. Nacking mccouch update. [views:debug,2014-08-19T16:54:22.176,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/695. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:22.176,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",695,active,0} [ns_server:debug,2014-08-19T16:54:22.177,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,697,1008, 995,748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760,696,632,568, 1007,994,962,747,715,981,949,766,734,702,670,638,606,574,1013,968,753,721, 1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727,695,1006,993, 961,746,714,682,650,618,586,554,980,948,765,733,701,1012,999,967,752,720,688, 656,624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630,598,566, 1005,992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966,751, 719,985,953,738,706,674,642,610,578,1017,972,940,757,725,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,763,731,699,1010,965,718,654,590,952, 705,1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951,704, 640,576,1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588,950, 767,703,1014,754,690,626,562,1001,988,741,975,728,664,600] [ns_server:debug,2014-08-19T16:54:22.235,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 680. Nacking mccouch update. [views:debug,2014-08-19T16:54:22.235,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/680. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:22.235,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",680,active,0} [ns_server:debug,2014-08-19T16:54:22.235,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,990,974,958,942,1022,1006,762, 746,730,714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,684, 998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744, 728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000] [ns_server:debug,2014-08-19T16:54:22.276,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 964. Nacking mccouch update. [views:debug,2014-08-19T16:54:22.277,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/964. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:22.277,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",964,replica,0} [ns_server:debug,2014-08-19T16:54:22.277,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,964,1012,986,970,1018,1002,992, 976,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:54:22.294,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/695. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:22.294,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",695,active,0} [views:debug,2014-08-19T16:54:22.344,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/680. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:22.344,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",680,active,0} [views:debug,2014-08-19T16:54:22.378,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/964. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:22.378,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",964,replica,0} [ns_server:debug,2014-08-19T16:54:22.567,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 693. Nacking mccouch update. [views:debug,2014-08-19T16:54:22.568,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/693. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:22.568,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",693,active,0} [ns_server:debug,2014-08-19T16:54:22.568,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,697,1008, 995,748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760,696,632,568, 1007,994,962,747,715,981,949,766,734,702,670,638,606,574,1013,968,753,721, 1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727,695,1006,993, 961,746,714,682,650,618,586,554,980,948,765,733,701,1012,999,967,752,720,688, 656,624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630,598,566, 1005,992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966,751, 719,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,731,699,1010,965,718,654,590, 952,705,1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951, 704,640,576,1015,938,755,1002,989,742,678,614,550,976,729,963,716,652,588, 950,767,703,1014,754,690,626,562,1001,988,741,975,728,664,600] [ns_server:debug,2014-08-19T16:54:22.676,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 962. Nacking mccouch update. [views:debug,2014-08-19T16:54:22.676,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/962. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:22.676,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,964,1012,986,970,1018,1002,992, 976,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,962,1010] [ns_server:debug,2014-08-19T16:54:22.676,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",962,replica,0} [ns_server:debug,2014-08-19T16:54:22.726,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 678. Nacking mccouch update. [views:debug,2014-08-19T16:54:22.726,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/678. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:22.726,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",678,active,0} [views:debug,2014-08-19T16:54:22.727,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/693. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:22.727,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",693,active,0} [ns_server:debug,2014-08-19T16:54:22.727,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,990,974,958,942,1022,1006,762, 746,730,714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760, 744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000] [views:debug,2014-08-19T16:54:22.802,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/962. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:22.802,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",962,replica,0} [views:debug,2014-08-19T16:54:22.861,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/678. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:22.861,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",678,active,0} [ns_server:debug,2014-08-19T16:54:23.044,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 691. Nacking mccouch update. [views:debug,2014-08-19T16:54:23.045,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/691. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:23.045,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",691,active,0} [ns_server:debug,2014-08-19T16:54:23.045,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,697,1008, 995,748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760,696,632,568, 1007,994,962,747,715,981,949,766,734,702,670,638,606,574,1013,968,753,721, 1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727,695,1006,993, 961,746,714,682,650,618,586,554,980,948,765,733,701,1012,999,967,752,720,688, 656,624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630,598,566, 1005,992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966,751, 719,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,731,699,1010,965,718,654,590, 952,705,1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951, 704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,963,716,652, 588,950,767,703,1014,754,690,626,562,1001,988,741,975,728,664,600] [ns_server:debug,2014-08-19T16:54:23.103,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 960. Nacking mccouch update. [views:debug,2014-08-19T16:54:23.103,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/960. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:23.103,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",960,replica,0} [ns_server:debug,2014-08-19T16:54:23.103,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,964,1012,986,970,1018,1002,992, 976,960,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,962,1010] [views:debug,2014-08-19T16:54:23.211,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/691. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:23.211,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",691,active,0} [ns_server:debug,2014-08-19T16:54:23.227,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 676. Nacking mccouch update. [views:debug,2014-08-19T16:54:23.228,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/676. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:23.228,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",676,active,0} [views:debug,2014-08-19T16:54:23.228,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/960. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:23.228,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,676,990,974,958,942,1022,1006, 762,746,730,714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954, 938,1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716, 700,684,998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004, 760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000] [ns_server:debug,2014-08-19T16:54:23.228,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",960,replica,0} [views:debug,2014-08-19T16:54:23.322,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/676. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:23.322,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",676,active,0} [ns_server:debug,2014-08-19T16:54:23.429,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 958. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:23.429,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 689. Nacking mccouch update. [views:debug,2014-08-19T16:54:23.429,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/958. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:23.429,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",958,replica,0} [views:debug,2014-08-19T16:54:23.429,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/689. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:23.429,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,1018,1002, 992,976,960,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,962,1010] [ns_server:debug,2014-08-19T16:54:23.429,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",689,active,0} [ns_server:debug,2014-08-19T16:54:23.430,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,697,1008, 995,748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760,696,632,568, 1007,994,747,981,949,766,734,702,670,638,606,574,1013,968,753,721,689,1000, 987,955,740,708,676,644,612,580,548,1019,974,942,759,727,695,1006,993,961, 746,714,682,650,618,586,554,980,948,765,733,701,1012,999,967,752,720,688,656, 624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630,598,566,1005, 992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,985, 953,738,706,674,642,610,578,1017,972,940,757,725,693,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,763,731,699,1010,965,718,654,590,952,705, 1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951,704,640, 576,1015,938,755,691,1002,989,742,678,614,550,976,729,963,716,652,588,950, 767,703,1014,754,690,626,562,1001,988,741,975,728,664,600,962,715] [ns_server:debug,2014-08-19T16:54:23.538,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 674. Nacking mccouch update. [views:debug,2014-08-19T16:54:23.538,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/674. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:23.538,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",674,active,0} [ns_server:debug,2014-08-19T16:54:23.538,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954,938,1018, 1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700,684, 998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004,760, 744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000,740,708, 676] [views:debug,2014-08-19T16:54:23.580,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/958. Updated state: replica (0) [views:debug,2014-08-19T16:54:23.580,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/689. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:23.580,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",958,replica,0} [ns_server:debug,2014-08-19T16:54:23.580,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",689,active,0} [views:debug,2014-08-19T16:54:23.656,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/674. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:23.656,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",674,active,0} [ns_server:debug,2014-08-19T16:54:23.862,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 687. Nacking mccouch update. [views:debug,2014-08-19T16:54:23.862,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/687. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:23.862,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",687,active,0} [ns_server:debug,2014-08-19T16:54:23.863,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,983,736,672,608,970,723,957,710,646,582,1021,944,761,697,1008, 995,748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760,696,632,568, 1007,994,747,981,949,766,734,702,670,638,606,574,1013,968,753,721,689,1000, 987,955,740,708,676,644,612,580,548,1019,974,942,759,727,695,1006,993,961, 746,714,682,650,618,586,554,980,948,765,733,701,1012,999,967,752,720,688,656, 624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630,598,566,1005, 992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687, 985,953,738,706,674,642,610,578,1017,972,940,757,725,693,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,763,731,699,1010,965,718,654,590,952, 705,1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,717,951,704, 640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,963,716,652,588, 950,767,703,1014,754,690,626,562,1001,988,741,975,728,664,600,962,715] [ns_server:debug,2014-08-19T16:54:23.929,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 672. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:23.929,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 956. Nacking mccouch update. [views:debug,2014-08-19T16:54:23.929,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/672. Updated state: active (0) [views:debug,2014-08-19T16:54:23.929,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/956. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:23.929,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",672,active,0} [ns_server:debug,2014-08-19T16:54:23.929,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",956,replica,0} [ns_server:debug,2014-08-19T16:54:23.929,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,1018,1002, 992,976,960,1008,998,982,966,1014,988,972,956,1020,1004,1023,994,978,962, 1010] [ns_server:debug,2014-08-19T16:54:23.929,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004, 760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000,740, 708,676] [views:debug,2014-08-19T16:54:23.979,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/687. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:23.979,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",687,active,0} [views:debug,2014-08-19T16:54:24.071,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/956. Updated state: replica (0) [views:debug,2014-08-19T16:54:24.072,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/672. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:24.072,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",956,replica,0} [ns_server:debug,2014-08-19T16:54:24.072,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",672,active,0} [ns_server:debug,2014-08-19T16:54:24.222,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 685. Nacking mccouch update. [views:debug,2014-08-19T16:54:24.222,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/685. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:24.222,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",685,active,0} [ns_server:debug,2014-08-19T16:54:24.223,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,685,983,736,672,608,970,723,957,710,646,582,1021,944,761,697, 1008,995,748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760,696, 632,568,1007,994,747,981,949,766,734,702,670,638,606,574,1013,968,753,721, 689,1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727,695,1006, 993,961,746,714,682,650,618,586,554,980,948,765,733,701,1012,999,967,752,720, 688,656,624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630,598,566, 1005,992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998,966,751, 719,687,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,1004,991, 959,744,712,680,648,616,584,552,1023,978,946,763,731,699,1010,965,718,654, 590,952,705,1016,939,756,692,628,564,1003,990,743,977,730,666,602,964,717, 951,704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,963,716, 652,588,950,767,703,1014,754,690,626,562,1001,988,741,975,728,664,600,962, 715] [ns_server:debug,2014-08-19T16:54:24.331,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 954. Nacking mccouch update. [views:debug,2014-08-19T16:54:24.331,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/954. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:24.331,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",954,replica,0} [ns_server:debug,2014-08-19T16:54:24.331,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,954,1018, 1002,992,976,960,1008,998,982,966,1014,988,972,956,1020,1004,1023,994,978, 962,1010] [ns_server:debug,2014-08-19T16:54:24.347,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 670. Nacking mccouch update. [views:debug,2014-08-19T16:54:24.348,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/670. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:24.348,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",670,active,0} [views:debug,2014-08-19T16:54:24.348,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/685. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:24.348,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",685,active,0} [ns_server:debug,2014-08-19T16:54:24.348,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004, 760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,670,968,1000, 740,708,676] [views:debug,2014-08-19T16:54:24.431,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/954. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:24.432,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",954,replica,0} [views:debug,2014-08-19T16:54:24.473,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/670. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:24.473,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",670,active,0} [ns_server:debug,2014-08-19T16:54:24.590,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 683. Nacking mccouch update. [views:debug,2014-08-19T16:54:24.590,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/683. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:24.590,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",683,active,0} [ns_server:debug,2014-08-19T16:54:24.591,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,685,983,736,672,608,970,723,957,710,646,582,1021,944,761,697, 1008,995,748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760,696, 632,568,1007,994,747,683,981,949,766,734,702,670,638,606,574,1013,968,753, 721,689,1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727,695, 1006,993,961,746,714,682,650,618,586,554,980,948,765,733,701,1012,999,967, 752,720,688,656,624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630, 598,566,1005,992,960,745,713,979,947,764,732,700,668,636,604,572,1011,998, 966,751,719,687,985,953,738,706,674,642,610,578,1017,972,940,757,725,693, 1004,991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,1010,965, 718,654,590,952,705,1016,939,756,692,628,564,1003,990,743,977,730,666,602, 964,717,951,704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729, 963,716,652,588,950,767,703,1014,754,690,626,562,1001,988,741,975,728,664, 600,962,715] [ns_server:debug,2014-08-19T16:54:24.624,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 952. Nacking mccouch update. 
[views:debug,2014-08-19T16:54:24.624,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/952. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:24.624,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",952,replica,0} [ns_server:debug,2014-08-19T16:54:24.624,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,1014,988,972,956,1020,1004,1023,994, 978,962,1010] [views:debug,2014-08-19T16:54:24.699,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/683. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:24.699,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",683,active,0} [ns_server:debug,2014-08-19T16:54:24.716,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 668. Nacking mccouch update. [views:debug,2014-08-19T16:54:24.716,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/668. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:24.716,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",668,active,0} [ns_server:debug,2014-08-19T16:54:24.716,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,668,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020, 1004,760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,670,968, 1000,740,708,676] [views:debug,2014-08-19T16:54:24.716,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/952. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:24.716,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",952,replica,0} [views:debug,2014-08-19T16:54:24.818,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/668. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:24.818,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",668,active,0} [ns_server:debug,2014-08-19T16:54:24.969,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 681. Nacking mccouch update. [views:debug,2014-08-19T16:54:24.969,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/681. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:24.969,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",681,active,0} [ns_server:debug,2014-08-19T16:54:24.969,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,685,983,736,672,608,970,723,957,710,646,582,1021,944,761,697, 1008,995,748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760,696, 632,568,1007,994,747,683,981,949,766,734,702,670,638,606,574,1013,968,753, 721,689,1000,987,955,740,708,676,644,612,580,548,1019,974,942,759,727,695, 1006,993,961,746,714,682,650,618,586,554,980,948,765,733,701,1012,999,967, 752,720,688,656,624,592,560,986,954,739,707,1018,973,941,758,726,694,662,630, 598,566,1005,992,960,745,713,681,979,947,764,732,700,668,636,604,572,1011, 998,966,751,719,687,985,953,738,706,674,642,610,578,1017,972,940,757,725,693, 1004,991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,1010,965, 718,654,590,952,705,1016,939,756,692,628,564,1003,990,743,977,730,666,602, 964,717,951,704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729, 963,716,652,588,950,767,703,1014,754,690,626,562,1001,988,741,975,728,664, 600,962,715] [ns_server:debug,2014-08-19T16:54:25.010,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 950. Nacking mccouch update. [views:debug,2014-08-19T16:54:25.011,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/950. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:25.011,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",950,replica,0} [ns_server:debug,2014-08-19T16:54:25.011,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,950,1014,988,972,956,1020,1004,1023, 994,978,962,1010] [views:debug,2014-08-19T16:54:25.078,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/681. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:25.078,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",681,active,0} [ns_server:debug,2014-08-19T16:54:25.098,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 666. Nacking mccouch update. [views:debug,2014-08-19T16:54:25.098,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/666. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:25.099,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",666,active,0} [views:debug,2014-08-19T16:54:25.099,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/950. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:25.099,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",950,replica,0} [ns_server:debug,2014-08-19T16:54:25.099,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,668,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020, 1004,760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,670,968, 1000,740,708,676] [views:debug,2014-08-19T16:54:25.201,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/666. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:25.201,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",666,active,0} [ns_server:debug,2014-08-19T16:54:25.300,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 679. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:25.300,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 948. Nacking mccouch update. [views:debug,2014-08-19T16:54:25.300,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/679. Updated state: active (0) [views:debug,2014-08-19T16:54:25.300,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/948. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:25.300,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",679,active,0} [ns_server:debug,2014-08-19T16:54:25.300,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",948,replica,0} [ns_server:debug,2014-08-19T16:54:25.301,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,948,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,950,1014,988,972,956,1020,1004,1023, 994,978,962,1010] [ns_server:debug,2014-08-19T16:54:25.301,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,685,983,736,672,608,970,723,957,710,646,582,1021,944,761,697, 1008,995,748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760,696, 632,568,1007,994,747,683,981,734,670,606,968,753,721,689,1000,987,955,740, 708,676,644,612,580,548,1019,974,942,759,727,695,1006,993,961,746,714,682, 650,618,586,554,980,948,765,733,701,1012,999,967,752,720,688,656,624,592,560, 986,954,739,707,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745, 713,681,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,985,953, 738,706,674,642,610,578,1017,972,940,757,725,693,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,731,699,1010,965,718,654,590,952,705,1016, 939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,951,704,640,576, 1015,938,755,691,1002,989,742,678,614,550,976,729,963,716,652,588,950,767, 703,1014,754,690,626,562,1001,988,741,975,728,664,600,962,715,949,766,702, 638,574,1013] [ns_server:debug,2014-08-19T16:54:25.410,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 664. Nacking mccouch update. [views:debug,2014-08-19T16:54:25.410,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/664. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:25.410,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",664,active,0} [ns_server:debug,2014-08-19T16:54:25.410,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,958,1022,762,746,730,714,698,682, 666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938,1018,1002, 758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700,684,668,998, 982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670,968,1000,740, 708,676,974,942,1006] [views:debug,2014-08-19T16:54:25.427,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/679. Updated state: active (0) [views:debug,2014-08-19T16:54:25.427,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/948. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:25.428,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",679,active,0} [ns_server:debug,2014-08-19T16:54:25.428,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",948,replica,0} [views:debug,2014-08-19T16:54:25.553,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/664. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:25.553,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",664,active,0} [ns_server:debug,2014-08-19T16:54:25.702,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 946. Nacking mccouch update. [views:debug,2014-08-19T16:54:25.703,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/946. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:25.703,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",946,replica,0} [ns_server:debug,2014-08-19T16:54:25.703,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,948,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,950,1014,988,972,956,1020,1004,1023, 994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:25.769,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 677. Nacking mccouch update. [views:debug,2014-08-19T16:54:25.769,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/677. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:25.769,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",677,active,0} [ns_server:debug,2014-08-19T16:54:25.770,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,685,983,736,672,608,970,723,957,710,646,582,1021,944,761,697, 1008,995,748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760,696, 632,568,1007,994,747,683,981,734,670,606,968,753,721,689,1000,987,955,740, 708,676,644,612,580,548,1019,974,942,759,727,695,1006,993,961,746,714,682, 650,618,586,554,980,948,765,733,701,1012,999,967,752,720,688,656,624,592,560, 986,954,739,707,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745, 713,681,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,985,953, 738,706,674,642,610,578,1017,972,940,757,725,693,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,731,699,1010,965,718,654,590,952,705,1016, 939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,951,704,640,576, 1015,938,755,691,1002,989,742,678,614,550,976,729,963,716,652,588,950,767, 703,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962,715,949,766, 702,638,574,1013] [ns_server:debug,2014-08-19T16:54:25.803,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 662. Nacking mccouch update. [views:debug,2014-08-19T16:54:25.803,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/662. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:25.803,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",662,active,0} [ns_server:debug,2014-08-19T16:54:25.803,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,958,1022,762,746,730,714,698,682, 666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670,968,1000, 740,708,676,974,942,1006] [views:debug,2014-08-19T16:54:25.820,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/946. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:25.820,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",946,replica,0} [views:debug,2014-08-19T16:54:25.887,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/677. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:25.887,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",677,active,0} [views:debug,2014-08-19T16:54:25.921,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/662. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:25.921,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",662,active,0} [ns_server:debug,2014-08-19T16:54:26.038,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 944. Nacking mccouch update. [views:debug,2014-08-19T16:54:26.038,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/944. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:26.038,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",944,replica,0} [ns_server:debug,2014-08-19T16:54:26.038,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,948,1012,986,970,954, 1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,1020,1004, 1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:26.122,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 675. Nacking mccouch update. [views:debug,2014-08-19T16:54:26.122,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/675. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:26.122,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",675,active,0} [views:debug,2014-08-19T16:54:26.122,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/944. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:26.122,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",944,replica,0} [ns_server:debug,2014-08-19T16:54:26.123,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,971,724,660,596,958,711,1022,945,762,698,634,570, 1009,996,749,685,983,736,672,608,970,723,957,710,646,582,1021,944,761,697, 1008,995,748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760,696, 632,568,1007,994,747,683,981,734,670,606,968,753,721,689,1000,987,955,740, 708,676,644,612,580,548,1019,974,942,759,727,695,1006,993,961,746,714,682, 650,618,586,554,980,948,765,733,701,1012,999,967,752,720,688,656,624,592,560, 986,954,739,707,675,1018,973,941,758,726,694,662,630,598,566,1005,992,960, 745,713,681,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,985, 953,738,706,674,642,610,578,1017,972,940,757,725,693,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,763,731,699,1010,965,718,654,590,952,705, 1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,951,704, 640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,963,716,652,588, 950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962,715, 949,766,702,638,574,1013] [ns_server:debug,2014-08-19T16:54:26.163,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 660. Nacking mccouch update. 
[views:debug,2014-08-19T16:54:26.164,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/660. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:26.164,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",660,active,0} [ns_server:debug,2014-08-19T16:54:26.164,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,660,990,958,1022,762,746,730,714,698, 682,666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938,1018, 1002,758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700, 684,668,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670, 968,1000,740,708,676,974,942,1006] [views:debug,2014-08-19T16:54:26.290,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/660. Updated state: active (0) [views:debug,2014-08-19T16:54:26.290,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/675. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:26.290,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",660,active,0} [ns_server:debug,2014-08-19T16:54:26.290,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",675,active,0} [ns_server:debug,2014-08-19T16:54:26.420,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 942. Nacking mccouch update. [views:debug,2014-08-19T16:54:26.421,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/942. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:26.421,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",942,replica,0} [ns_server:debug,2014-08-19T16:54:26.421,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986,970, 954,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,1020, 1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:26.521,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/942. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:26.521,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",942,replica,0} [ns_server:debug,2014-08-19T16:54:26.613,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 658. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:26.613,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 673. Nacking mccouch update. [views:debug,2014-08-19T16:54:26.613,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/658. 
Updated state: active (0) [views:debug,2014-08-19T16:54:26.613,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/673. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:26.613,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",658,active,0} [ns_server:debug,2014-08-19T16:54:26.613,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",673,active,0} [ns_server:debug,2014-08-19T16:54:26.613,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,660,990,958,1022,762,746,730,714,698, 682,666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938,1018, 1002,758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700, 684,668,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702, 670,968,1000,740,708,676,974,942,1006] [ns_server:debug,2014-08-19T16:54:26.614,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,1022,945,762,698,634, 570,1009,996,749,685,983,736,672,608,970,723,957,710,646,582,1021,944,761, 697,1008,995,748,684,620,556,982,735,969,722,658,594,956,709,1020,943,760, 696,632,568,1007,994,747,683,981,734,670,606,968,753,721,689,1000,987,955, 740,708,676,644,612,580,548,1019,974,942,759,727,695,1006,993,961,746,714, 682,650,618,586,554,980,948,765,733,701,1012,999,967,752,720,688,656,624,592, 560,986,954,739,707,675,1018,973,941,758,726,694,662,630,598,566,1005,992, 960,745,713,681,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687, 985,953,738,706,674,642,610,578,1017,972,940,757,725,693,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,763,731,699,1010,965,718,654,590,952, 705,1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,951, 704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,963,716,652, 588,950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962, 715,949,766,702,638,574,1013] [views:debug,2014-08-19T16:54:26.722,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/673. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:26.722,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",673,active,0} [views:debug,2014-08-19T16:54:26.722,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/658. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:26.722,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",658,active,0} [ns_server:debug,2014-08-19T16:54:26.847,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 940. Nacking mccouch update. [views:debug,2014-08-19T16:54:26.847,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/940. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:26.847,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",940,replica,0} [ns_server:debug,2014-08-19T16:54:26.848,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986,970, 954,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,940,1020, 1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:26.932,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/940. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:26.932,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",940,replica,0} [ns_server:debug,2014-08-19T16:54:27.064,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 656. Nacking mccouch update. [views:debug,2014-08-19T16:54:27.064,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/656. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.065,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",656,active,0} [ns_server:debug,2014-08-19T16:54:27.065,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,660,990,958,1022,762,746,730,714,698, 682,666,996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938, 1018,1002,758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716, 700,684,668,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702, 670,968,1000,740,708,676,974,942,1006] [ns_server:debug,2014-08-19T16:54:27.090,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 671. Nacking mccouch update. [views:debug,2014-08-19T16:54:27.090,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/671. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.090,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",671,active,0} [ns_server:debug,2014-08-19T16:54:27.091,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,1022,945,762,698,634, 570,1009,996,749,685,983,736,672,608,970,723,957,710,646,582,1021,944,761, 697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,1020,943, 760,696,632,568,1007,994,747,683,981,734,670,606,968,753,721,689,1000,987, 955,740,708,676,644,612,580,548,1019,974,942,759,727,695,1006,993,961,746, 714,682,650,618,586,554,980,948,765,733,701,1012,999,967,752,720,688,656,624, 592,560,986,954,739,707,675,1018,973,941,758,726,694,662,630,598,566,1005, 992,960,745,713,681,979,947,764,732,700,668,636,604,572,1011,998,966,751,719, 687,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,731,699,1010,965,718,654,590, 952,705,1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717, 951,704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,963,716, 652,588,950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600, 962,715,949,766,702,638,574,1013] [ns_server:debug,2014-08-19T16:54:27.165,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 938. Nacking mccouch update. [views:debug,2014-08-19T16:54:27.165,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/938. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:27.165,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",938,replica,0} [views:debug,2014-08-19T16:54:27.165,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/656. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.165,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986,970, 954,938,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,940, 1020,1004,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:27.166,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",656,active,0} [views:debug,2014-08-19T16:54:27.207,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/671. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.208,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",671,active,0} [views:debug,2014-08-19T16:54:27.335,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/938. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:27.335,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",938,replica,0} [ns_server:debug,2014-08-19T16:54:27.408,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 654. Nacking mccouch update. [views:debug,2014-08-19T16:54:27.408,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/654. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.409,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",654,active,0} [ns_server:debug,2014-08-19T16:54:27.409,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670,968, 1000,740,708,676,974,942,1006,746,714,682] [ns_server:debug,2014-08-19T16:54:27.425,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 669. Nacking mccouch update. [views:debug,2014-08-19T16:54:27.425,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/669. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.425,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",669,active,0} [ns_server:debug,2014-08-19T16:54:27.426,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,1022,945,762,698,634, 570,1009,996,749,685,983,736,672,608,970,723,957,710,646,582,1021,944,761, 697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,1020,943, 760,696,632,568,1007,994,747,683,981,734,670,606,968,721,987,955,740,708,676, 644,612,580,548,1019,974,942,759,727,695,1006,993,961,746,714,682,650,618, 586,554,980,948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560,986, 954,739,707,675,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745, 713,681,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,985,953, 738,706,674,642,610,578,1017,972,940,757,725,693,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,731,699,1010,965,718,654,590,952,705,1016, 939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,951,704,640,576, 1015,938,755,691,1002,989,742,678,614,550,976,729,963,716,652,588,950,767, 703,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962,715,949,766, 702,638,574,1013,753,689,1000] [views:debug,2014-08-19T16:54:27.493,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/654. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.493,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",654,active,0} [ns_server:debug,2014-08-19T16:54:27.526,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 766. Nacking mccouch update. [views:debug,2014-08-19T16:54:27.526,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/766. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.526,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",766,active,0} [views:debug,2014-08-19T16:54:27.526,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/669. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.526,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986, 970,954,938,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956, 940,1020,1004,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:27.527,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",669,active,0} [views:debug,2014-08-19T16:54:27.617,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/766. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.617,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",766,active,0} [ns_server:debug,2014-08-19T16:54:27.758,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 667. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:27.758,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 652. Nacking mccouch update. [views:debug,2014-08-19T16:54:27.758,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/667. Updated state: active (0) [views:debug,2014-08-19T16:54:27.758,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/652. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.758,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",667,active,0} [ns_server:debug,2014-08-19T16:54:27.758,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",652,active,0} [ns_server:debug,2014-08-19T16:54:27.758,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 652,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670, 968,1000,740,708,676,974,942,1006,746,714,682] [ns_server:debug,2014-08-19T16:54:27.758,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,1022,945,762,698,634, 570,1009,996,749,685,983,736,672,608,970,723,957,710,646,582,1021,944,761, 697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,1020,943, 760,696,632,568,1007,994,747,683,981,734,670,606,968,721,987,955,740,708,676, 644,612,580,548,1019,974,942,759,727,695,1006,993,961,746,714,682,650,618, 586,554,980,948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560,986, 954,739,707,675,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745, 713,681,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,985,953, 738,706,674,642,610,578,1017,972,940,757,725,693,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952,705, 1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,951,704, 640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,963,716,652,588, 950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962,715, 949,766,702,638,574,1013,753,689,1000] [ns_server:debug,2014-08-19T16:54:27.858,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 764. Nacking mccouch update. [views:debug,2014-08-19T16:54:27.858,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/764. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.858,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",764,active,0} [ns_server:debug,2014-08-19T16:54:27.858,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986, 970,954,938,1018,1002,992,976,960,944,1008,764,998,982,966,950,1014,988,972, 956,940,1020,1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:27.892,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/667. Updated state: active (0) [views:debug,2014-08-19T16:54:27.892,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/652. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.892,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",667,active,0} [ns_server:debug,2014-08-19T16:54:27.892,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",652,active,0} [views:debug,2014-08-19T16:54:27.992,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/764. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:27.993,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",764,active,0} [ns_server:debug,2014-08-19T16:54:28.152,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 650. Nacking mccouch update. [views:debug,2014-08-19T16:54:28.152,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/650. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:28.153,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",650,active,0} [ns_server:debug,2014-08-19T16:54:28.153,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 652,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670, 968,1000,740,708,676,974,942,1006,746,714,682,650] [ns_server:debug,2014-08-19T16:54:28.261,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 762. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:28.261,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 665. Nacking mccouch update. [views:debug,2014-08-19T16:54:28.261,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/762. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:28.261,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",762,active,0} [views:debug,2014-08-19T16:54:28.261,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/665. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:28.262,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",665,active,0} [ns_server:debug,2014-08-19T16:54:28.262,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,762,996,980,964,948,1012, 986,970,954,938,1018,1002,992,976,960,944,1008,764,998,982,966,950,1014,988, 972,956,940,1020,1004,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:28.262,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,1022,945,762,698,634, 570,1009,996,749,685,983,736,672,608,970,723,957,710,646,582,1021,944,761, 697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,1020,943, 760,696,632,568,1007,994,747,683,981,734,670,606,968,721,987,955,740,708,676, 644,612,580,548,1019,974,942,759,727,695,1006,993,961,746,714,682,650,618, 586,554,980,948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560,986, 954,739,707,675,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745, 713,681,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,985,953, 738,706,674,642,610,578,1017,972,940,757,725,693,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952,705, 1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,951,704, 640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665,963,716,652, 588,950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962, 715,949,766,702,638,574,1013,753,689,1000] [views:debug,2014-08-19T16:54:28.328,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/650. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:28.328,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",650,active,0} [views:debug,2014-08-19T16:54:28.387,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/762. Updated state: active (0) [views:debug,2014-08-19T16:54:28.387,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/665. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:28.387,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",762,active,0} [ns_server:debug,2014-08-19T16:54:28.387,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",665,active,0} [ns_server:debug,2014-08-19T16:54:28.504,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 648. Nacking mccouch update. [views:debug,2014-08-19T16:54:28.504,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/648. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:28.504,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",648,active,0} [ns_server:debug,2014-08-19T16:54:28.505,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 652,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702, 670,968,1000,740,708,676,974,942,1006,746,714,682,650] [ns_server:debug,2014-08-19T16:54:28.604,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 663. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:28.605,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 760. Nacking mccouch update. [views:debug,2014-08-19T16:54:28.605,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/663. Updated state: active (0) [views:debug,2014-08-19T16:54:28.605,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/760. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:28.605,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",663,active,0} [views:debug,2014-08-19T16:54:28.605,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/648. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:28.605,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",760,active,0} [ns_server:debug,2014-08-19T16:54:28.605,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,762,996,980,964,948,1012, 986,970,954,938,1018,1002,992,976,960,944,1008,764,998,982,966,950,1014,988, 972,956,940,1020,1004,760,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:28.605,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",648,active,0} [ns_server:debug,2014-08-19T16:54:28.605,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,1022,945,762,698,634, 570,1009,996,749,685,983,736,672,608,970,723,957,710,646,582,1021,944,761, 697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,1020,943, 760,696,632,568,1007,994,747,683,981,734,670,606,968,721,987,955,740,708,676, 644,612,580,548,1019,974,942,759,727,695,663,1006,993,961,746,714,682,650, 618,586,554,980,948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560, 986,954,739,707,675,1018,973,941,758,726,694,662,630,598,566,1005,992,960, 745,713,681,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,985, 953,738,706,674,642,610,578,1017,972,940,757,725,693,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952, 705,1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,951, 704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665,963,716, 652,588,950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600, 962,715,949,766,702,638,574,1013,753,689,1000] [views:debug,2014-08-19T16:54:28.663,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/663. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:28.664,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",663,active,0} [views:debug,2014-08-19T16:54:28.705,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/760. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:28.705,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",760,active,0} [ns_server:debug,2014-08-19T16:54:28.839,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 646. Nacking mccouch update. [views:debug,2014-08-19T16:54:28.840,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/646. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:28.840,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",646,active,0} [ns_server:debug,2014-08-19T16:54:28.840,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684, 668,652,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734, 702,670,968,1000,740,708,676,974,942,1006,746,714,682,650] [ns_server:debug,2014-08-19T16:54:28.911,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 758. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:28.911,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 661. Nacking mccouch update. [views:debug,2014-08-19T16:54:28.912,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/758. Updated state: active (0) [views:debug,2014-08-19T16:54:28.912,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/661. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:28.912,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",758,active,0} [ns_server:debug,2014-08-19T16:54:28.912,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,990,974,958,942,1022,1006,762,996,980,964,948,1012, 986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982,966,950,1014, 988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:28.912,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",661,active,0} [ns_server:debug,2014-08-19T16:54:28.913,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,1022,945,762,698,634, 570,1009,996,749,685,983,736,672,608,970,723,957,710,646,582,1021,944,761, 697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,1020,943, 760,696,632,568,1007,994,747,683,981,734,670,606,968,721,987,955,740,708,676, 644,612,580,548,1019,974,942,759,727,695,663,1006,993,961,746,714,682,650, 618,586,554,980,948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560, 986,954,739,707,675,1018,973,941,758,726,694,662,630,598,566,1005,992,960, 745,713,681,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,985, 953,738,706,674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590, 952,705,1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717, 951,704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665,963, 716,652,588,950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664, 600,962,715,949,766,702,638,574,1013,753,689,1000] 
[views:debug,2014-08-19T16:54:28.987,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/646. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:28.987,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",646,active,0} [views:debug,2014-08-19T16:54:29.054,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/758. Updated state: active (0) [views:debug,2014-08-19T16:54:29.054,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/661. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:29.055,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",758,active,0} [ns_server:debug,2014-08-19T16:54:29.055,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",661,active,0} [ns_server:debug,2014-08-19T16:54:29.263,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 644. Nacking mccouch update. [views:debug,2014-08-19T16:54:29.263,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/644. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:29.263,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",644,active,0} [ns_server:debug,2014-08-19T16:54:29.264,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,986,970,954,938,1018,1002,758,742,726, 710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652,998, 982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702,670,968, 1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012] [ns_server:debug,2014-08-19T16:54:29.397,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 756. Nacking mccouch update. [views:debug,2014-08-19T16:54:29.397,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/756. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:29.397,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",756,active,0} [ns_server:debug,2014-08-19T16:54:29.397,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964,948, 1012,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982,966,950, 1014,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:29.455,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 659. Nacking mccouch update. 
[views:debug,2014-08-19T16:54:29.455,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/659. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:29.456,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",659,active,0} [views:debug,2014-08-19T16:54:29.456,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/644. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:29.456,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",644,active,0} [ns_server:debug,2014-08-19T16:54:29.456,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,1022,945,762,698,634, 570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021,944, 761,697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,1020, 943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,955,708,644,580, 1019,974,942,759,727,695,663,1006,993,961,746,714,682,650,618,586,554,980, 948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560,986,954,739,707, 675,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681,979, 947,764,732,700,668,636,604,572,1011,998,966,751,719,687,985,953,738,706,674, 642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712,680,648,616, 584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952,705,1016,939, 756,692,628,564,1003,990,743,679,977,730,666,602,964,717,951,704,640,576, 1015,938,755,691,1002,989,742,678,614,550,976,729,665,963,716,652,588,950, 767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962,715,949, 766,702,638,574,1013,753,689,1000,987,740,676,612,548] [views:debug,2014-08-19T16:54:29.506,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/756. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:29.506,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",756,active,0} [views:debug,2014-08-19T16:54:29.581,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/659. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:29.581,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",659,active,0} [ns_server:debug,2014-08-19T16:54:29.731,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 642. Nacking mccouch update. [views:debug,2014-08-19T16:54:29.732,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/642. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:29.732,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",642,active,0} [ns_server:debug,2014-08-19T16:54:29.732,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,986,970,954,938,1018,1002,758,742,726, 710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652,998, 982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702,670, 968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012] [ns_server:debug,2014-08-19T16:54:29.765,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 754. Nacking mccouch update. [views:debug,2014-08-19T16:54:29.765,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/754. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:29.765,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",754,active,0} [ns_server:debug,2014-08-19T16:54:29.765,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964,948, 1012,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982,966,950, 1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:29.816,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/642. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:29.816,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",642,active,0} [ns_server:debug,2014-08-19T16:54:29.899,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 657. Nacking mccouch update. [views:debug,2014-08-19T16:54:29.899,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/657. Updated state: active (0) [views:debug,2014-08-19T16:54:29.899,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/754. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:29.900,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",657,active,0} [ns_server:debug,2014-08-19T16:54:29.900,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",754,active,0} [ns_server:debug,2014-08-19T16:54:29.900,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,1022,945,762,698,634, 570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021,944, 761,697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,1020, 943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657,955,708,644, 580,1019,974,942,759,727,695,663,1006,993,961,746,714,682,650,618,586,554, 980,948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560,986,954,739, 707,675,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681, 979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,985,953,738,706, 674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712,680,648, 616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952,705,1016, 939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,951,704,640,576, 1015,938,755,691,1002,989,742,678,614,550,976,729,665,963,716,652,588,950, 767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962,715,949, 766,702,638,574,1013,753,689,1000,987,740,676,612,548] [views:debug,2014-08-19T16:54:30.009,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/657. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.009,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",657,active,0} [ns_server:debug,2014-08-19T16:54:30.075,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 640. Nacking mccouch update. [views:debug,2014-08-19T16:54:30.075,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/640. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.075,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",640,active,0} [ns_server:debug,2014-08-19T16:54:30.076,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,640,986,970,954,938,1018,1002,758,742, 726,710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652, 998,982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702, 670,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012] [ns_server:debug,2014-08-19T16:54:30.192,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 752. Nacking mccouch update. [views:debug,2014-08-19T16:54:30.192,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/752. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.193,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",752,active,0} [ns_server:debug,2014-08-19T16:54:30.193,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964,948, 1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982,966, 950,1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:30.209,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/640. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.210,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",640,active,0} [views:debug,2014-08-19T16:54:30.310,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/752. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.310,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",752,active,0} [ns_server:debug,2014-08-19T16:54:30.343,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 655. Nacking mccouch update. [views:debug,2014-08-19T16:54:30.343,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/655. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.343,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",655,active,0} [ns_server:debug,2014-08-19T16:54:30.344,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,1022,945,762,698,634, 570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021,944, 761,697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,1020, 943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657,955,708,644, 580,1019,974,942,759,727,695,663,1006,993,961,746,714,682,650,618,586,554, 980,948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560,986,954,739, 707,675,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681, 979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,655,985,953,738, 706,674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952,705, 1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,951,704, 640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665,963,716,652, 588,950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962, 715,949,766,702,638,574,1013,753,689,1000,987,740,676,612,548] [views:debug,2014-08-19T16:54:30.474,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/655. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.475,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",655,active,0} [ns_server:debug,2014-08-19T16:54:30.516,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 638. Nacking mccouch update. [views:debug,2014-08-19T16:54:30.516,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/638. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.516,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",638,active,0} [ns_server:debug,2014-08-19T16:54:30.516,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,640,986,970,954,938,1018,1002,758,742, 726,710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652, 998,982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012] [ns_server:debug,2014-08-19T16:54:30.666,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 750. Nacking mccouch update. [views:debug,2014-08-19T16:54:30.666,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/750. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.667,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",750,active,0} [views:debug,2014-08-19T16:54:30.667,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/638. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.667,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964, 948,1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,998,982, 966,950,1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:30.667,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",638,active,0} [views:debug,2014-08-19T16:54:30.767,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/750. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.767,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",750,active,0} [ns_server:debug,2014-08-19T16:54:30.817,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 653. Nacking mccouch update. [views:debug,2014-08-19T16:54:30.817,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/653. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.817,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",653,active,0} [ns_server:debug,2014-08-19T16:54:30.818,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,1022,945,762,698,634, 570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021,944, 761,697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,1020, 943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657,955,708,644, 580,1019,974,942,759,727,695,663,1006,993,961,746,714,682,650,618,586,554, 980,948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560,986,954,739, 707,675,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681, 979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,655,985,953,738, 706,674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952,705, 1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951, 704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665,963,716, 652,588,950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600, 962,715,949,766,702,638,574,1013,753,689,1000,987,740,676,612,548] [views:debug,2014-08-19T16:54:30.918,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/653. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.918,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",653,active,0} [ns_server:debug,2014-08-19T16:54:30.935,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 636. Nacking mccouch update. [views:debug,2014-08-19T16:54:30.935,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/636. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.935,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",636,active,0} [ns_server:debug,2014-08-19T16:54:30.935,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,640,986,970,954,938,1018,1002,758,742, 726,710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652, 636,998,982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734, 702,670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948, 1012] [ns_server:debug,2014-08-19T16:54:30.968,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 748. Nacking mccouch update. [views:debug,2014-08-19T16:54:30.968,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/748. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:30.969,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",748,active,0} [ns_server:debug,2014-08-19T16:54:30.969,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,996,980,964, 948,1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,748,998, 982,966,950,1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:31.011,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/636. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:31.011,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",636,active,0} [views:debug,2014-08-19T16:54:31.061,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/748. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:31.061,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",748,active,0} [ns_server:debug,2014-08-19T16:54:31.162,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 651. Nacking mccouch update. [views:debug,2014-08-19T16:54:31.162,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/651. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:31.163,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",651,active,0} [ns_server:debug,2014-08-19T16:54:31.163,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,1022,945,762,698,634, 570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021,944, 761,697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,1020, 943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657,955,708,644, 580,1019,974,942,759,727,695,663,1006,993,961,746,714,682,650,618,586,554, 980,948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560,986,954,739, 707,675,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681, 979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,655,985,953,738, 706,674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952,705, 1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951, 704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665,963,716, 652,588,950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600, 962,715,651,949,766,702,638,574,1013,753,689,1000,987,740,676,612,548] [views:debug,2014-08-19T16:54:31.246,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/651. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:31.247,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",651,active,0} [ns_server:debug,2014-08-19T16:54:31.371,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 746. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:31.371,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 634. Nacking mccouch update. [views:debug,2014-08-19T16:54:31.371,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/746. Updated state: active (0) [views:debug,2014-08-19T16:54:31.372,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/634. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:31.372,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",746,active,0} [ns_server:debug,2014-08-19T16:54:31.372,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",634,active,0} [ns_server:debug,2014-08-19T16:54:31.372,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,746,996,980, 964,948,1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,748, 998,982,966,950,1014,754,988,972,956,940,1020,1004,760,1023,994,978,962,946, 1010] [ns_server:debug,2014-08-19T16:54:31.372,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710,694, 678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998,982, 966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702,670,638, 968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012,752,720, 688,656] [views:debug,2014-08-19T16:54:31.439,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/746. Updated state: active (0) [views:debug,2014-08-19T16:54:31.439,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/634. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:31.439,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",746,active,0} [ns_server:debug,2014-08-19T16:54:31.439,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",634,active,0} [ns_server:debug,2014-08-19T16:54:31.556,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 649. Nacking mccouch update. [views:debug,2014-08-19T16:54:31.556,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/649. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:31.556,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",649,active,0} [ns_server:debug,2014-08-19T16:54:31.557,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,1022,945,762,698,634, 570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021,944, 761,697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,1020, 943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657,955,708,644, 580,1019,942,759,695,1006,993,961,746,714,682,650,618,586,554,980,948,765, 733,701,669,1012,999,967,752,720,688,656,624,592,560,986,954,739,707,675, 1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681,649,979, 947,764,732,700,668,636,604,572,1011,998,966,751,719,687,655,985,953,738,706, 674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712,680,648, 616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952,705,1016, 939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951,704,640, 576,1015,938,755,691,1002,989,742,678,614,550,976,729,665,963,716,652,588, 950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962,715, 651,949,766,702,638,574,1013,753,689,1000,987,740,676,612,548,974,727,663] [views:debug,2014-08-19T16:54:31.615,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/649. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:31.615,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",649,active,0} [ns_server:debug,2014-08-19T16:54:31.727,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 744. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:31.727,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 632. Nacking mccouch update. [views:debug,2014-08-19T16:54:31.727,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/744. Updated state: active (0) [views:debug,2014-08-19T16:54:31.727,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/632. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:31.727,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",744,active,0} [ns_server:debug,2014-08-19T16:54:31.727,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",632,active,0} [ns_server:debug,2014-08-19T16:54:31.727,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,746,996,980, 964,948,1012,752,986,970,954,938,1018,1002,758,992,976,960,944,1008,764,748, 998,982,966,950,1014,754,988,972,956,940,1020,1004,760,744,1023,994,978,962, 946,1010] [ns_server:debug,2014-08-19T16:54:31.728,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710,694, 678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998,982, 966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702,670, 638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012,752, 720,688,656] [views:debug,2014-08-19T16:54:31.846,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/632. Updated state: active (0) [views:debug,2014-08-19T16:54:31.846,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/744. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:31.846,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",632,active,0} [ns_server:debug,2014-08-19T16:54:31.846,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",744,active,0} [ns_server:debug,2014-08-19T16:54:31.946,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 647. Nacking mccouch update. [views:debug,2014-08-19T16:54:31.946,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/647. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:31.946,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",647,active,0} [ns_server:debug,2014-08-19T16:54:31.947,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021, 944,761,697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709, 1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657,955, 708,644,580,1019,942,759,695,1006,993,961,746,714,682,650,618,586,554,980, 948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560,986,954,739,707, 675,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681,649, 979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,655,985,953,738, 706,674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952,705, 1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951, 704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665,963,716, 652,588,950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600, 962,715,651,949,766,702,638,574,1013,753,689,1000,987,740,676,612,548,974, 727,663] [views:debug,2014-08-19T16:54:32.022,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/647. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:32.022,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",647,active,0} [ns_server:debug,2014-08-19T16:54:32.164,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 630. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:32.164,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 742. Nacking mccouch update. [views:debug,2014-08-19T16:54:32.164,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/630. Updated state: active (0) [views:debug,2014-08-19T16:54:32.164,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/742. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:32.164,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",630,active,0} [ns_server:debug,2014-08-19T16:54:32.164,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",742,active,0} [ns_server:debug,2014-08-19T16:54:32.164,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,990,974,958,942,1022,1006,762,746,996,980, 964,948,1012,752,986,970,954,938,1018,1002,758,742,992,976,960,944,1008,764, 748,998,982,966,950,1014,754,988,972,956,940,1020,1004,760,744,1023,994,978, 962,946,1010] [ns_server:debug,2014-08-19T16:54:32.164,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710,694, 678,662,646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998, 982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012, 752,720,688,656] [views:debug,2014-08-19T16:54:32.240,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/742. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:32.240,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",742,active,0} [views:debug,2014-08-19T16:54:32.248,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/630. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:32.248,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",630,active,0} [ns_server:debug,2014-08-19T16:54:32.306,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 645. Nacking mccouch update. [views:debug,2014-08-19T16:54:32.306,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/645. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:32.307,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",645,active,0} [ns_server:debug,2014-08-19T16:54:32.307,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021, 944,761,697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,645, 1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657,955, 708,644,580,1019,942,759,695,1006,993,961,746,714,682,650,618,586,554,980, 948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560,986,954,739,707, 675,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681,649, 979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,655,985,953,738, 706,674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952,705, 1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951, 704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665,963,716, 652,588,950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664,600, 962,715,651,949,766,702,638,574,1013,753,689,1000,987,740,676,612,548,974, 727,663] [views:debug,2014-08-19T16:54:32.407,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/645. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:32.407,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",645,active,0} [ns_server:debug,2014-08-19T16:54:32.524,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 628. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:32.524,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 740. Nacking mccouch update. [views:debug,2014-08-19T16:54:32.524,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/628. Updated state: active (0) [views:debug,2014-08-19T16:54:32.524,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/740. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:32.524,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",628,active,0} [ns_server:debug,2014-08-19T16:54:32.524,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,628,990,958,1022,762,730,698, 666,634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710, 694,678,662,646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636, 998,982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734, 702,670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948, 1012,752,720,688,656] [ns_server:debug,2014-08-19T16:54:32.525,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",740,active,0} [ns_server:debug,2014-08-19T16:54:32.525,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746,996, 980,964,948,1012,752,986,970,954,938,1018,1002,758,742,992,976,960,944,1008, 764,748,998,982,966,950,1014,754,988,972,956,940,1020,1004,760,744,1023,994, 978,962,946,1010] [views:debug,2014-08-19T16:54:32.608,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/628. Updated state: active (0) [views:debug,2014-08-19T16:54:32.608,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/740. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:32.608,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",628,active,0} [ns_server:debug,2014-08-19T16:54:32.608,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",740,active,0} [ns_server:debug,2014-08-19T16:54:32.716,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 643. Nacking mccouch update. [views:debug,2014-08-19T16:54:32.717,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/643. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:32.717,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",643,active,0} [ns_server:debug,2014-08-19T16:54:32.717,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021, 944,761,697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,645, 1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657,955, 708,644,580,1019,942,759,695,1006,993,961,746,714,682,650,618,586,554,980, 948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560,986,954,739,707, 675,643,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681, 649,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,655,985,953, 738,706,674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952, 705,1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653, 951,704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665,963, 716,652,588,950,767,703,1014,754,690,626,562,1001,988,741,677,975,728,664, 600,962,715,651,949,766,702,638,574,1013,753,689,1000,987,740,676,612,548, 974,727,663] [views:debug,2014-08-19T16:54:32.792,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/643. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:32.792,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",643,active,0} [ns_server:debug,2014-08-19T16:54:32.942,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 738. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:32.942,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 626. Nacking mccouch update. [views:debug,2014-08-19T16:54:32.943,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/738. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:32.943,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",738,active,0} [views:debug,2014-08-19T16:54:32.943,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/626. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:32.943,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",626,active,0} [ns_server:debug,2014-08-19T16:54:32.943,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746,996, 980,964,948,1012,752,986,970,954,938,1018,1002,758,742,992,976,960,944,1008, 764,748,998,982,966,950,1014,754,738,988,972,956,940,1020,1004,760,744,1023, 994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:32.943,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,628,990,958,1022,762,730,698, 666,634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710, 694,678,662,646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636, 998,982,966,950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766, 734,702,670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980, 948,1012,752,720,688,656] [views:debug,2014-08-19T16:54:33.051,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/626. Updated state: active (0) [views:debug,2014-08-19T16:54:33.052,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/738. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.052,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",626,active,0} [ns_server:debug,2014-08-19T16:54:33.052,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",738,active,0} [ns_server:debug,2014-08-19T16:54:33.157,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 641. Nacking mccouch update. [views:debug,2014-08-19T16:54:33.157,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/641. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.157,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",641,active,0} [ns_server:debug,2014-08-19T16:54:33.158,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021, 944,761,697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,645, 1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657,955, 708,644,580,1019,942,759,695,1006,993,961,746,714,682,650,618,586,554,980, 948,765,733,701,669,1012,999,967,752,720,688,656,624,592,560,986,954,739,707, 675,643,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681, 649,979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,655,985,953, 738,706,674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952, 705,641,1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717, 653,951,704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665, 963,716,652,588,950,767,703,1014,754,690,626,562,1001,988,741,677,975,728, 664,600,962,715,651,949,766,702,638,574,1013,753,689,1000,987,740,676,612, 548,974,727,663] [views:debug,2014-08-19T16:54:33.208,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/641. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.208,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",641,active,0} [ns_server:debug,2014-08-19T16:54:33.274,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 624. Nacking mccouch update. [views:debug,2014-08-19T16:54:33.274,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/624. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.275,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",624,active,0} [ns_server:debug,2014-08-19T16:54:33.275,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,628,990,958,1022,762,730,698, 666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662,646, 630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702,670,638, 968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012,752,720, 688,656,624,986,954,1018] [ns_server:debug,2014-08-19T16:54:33.291,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 736. Nacking mccouch update. [views:debug,2014-08-19T16:54:33.291,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/736. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.291,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",736,active,0} [ns_server:debug,2014-08-19T16:54:33.292,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746,996, 980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976,960,944, 1008,764,748,998,982,966,950,1014,754,738,988,972,956,940,1020,1004,760,744, 1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:33.342,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/624. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.342,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",624,active,0} [ns_server:debug,2014-08-19T16:54:33.358,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 639. Nacking mccouch update. [views:debug,2014-08-19T16:54:33.359,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/639. Updated state: active (0) [views:debug,2014-08-19T16:54:33.359,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/736. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.359,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",639,active,0} [ns_server:debug,2014-08-19T16:54:33.359,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",736,active,0} [ns_server:debug,2014-08-19T16:54:33.360,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021, 944,761,697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,645, 1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657,955, 708,644,580,1019,942,759,695,1006,993,746,682,618,554,980,948,765,733,701, 669,1012,999,967,752,720,688,656,624,592,560,986,954,739,707,675,643,1018, 973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681,649,979,947,764, 732,700,668,636,604,572,1011,998,966,751,719,687,655,985,953,738,706,674,642, 610,578,1017,972,940,757,725,693,661,1004,991,959,744,712,680,648,616,584, 552,1023,978,946,763,731,699,667,1010,965,718,654,590,952,705,641,1016,939, 756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951,704,640,576, 1015,938,755,691,1002,989,742,678,614,550,976,729,665,963,716,652,588,950, 767,703,639,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962,715, 651,949,766,702,638,574,1013,753,689,1000,987,740,676,612,548,974,727,663, 961,714,650,586] [views:debug,2014-08-19T16:54:33.427,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/639. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.428,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",639,active,0} [ns_server:debug,2014-08-19T16:54:33.493,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 734. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:33.493,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 622. Nacking mccouch update. [views:debug,2014-08-19T16:54:33.493,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/734. Updated state: active (0) [views:debug,2014-08-19T16:54:33.493,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/622. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.493,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",734,active,0} [ns_server:debug,2014-08-19T16:54:33.494,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",622,active,0} [ns_server:debug,2014-08-19T16:54:33.494,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976,960, 944,1008,764,748,998,982,966,950,1014,754,738,988,972,956,940,1020,1004,760, 744,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:33.494,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662, 646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998,982,966, 950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702,670, 638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012,752, 720,688,656,624,986,954,1018] [views:debug,2014-08-19T16:54:33.544,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/734. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.544,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",734,active,0} [views:debug,2014-08-19T16:54:33.552,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/622. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.552,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",622,active,0} [ns_server:debug,2014-08-19T16:54:33.677,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 637. Nacking mccouch update. [views:debug,2014-08-19T16:54:33.677,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/637. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.678,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",637,active,0} [ns_server:debug,2014-08-19T16:54:33.678,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021, 944,761,697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,645, 1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657,955, 708,644,580,1019,942,759,695,1006,993,746,682,618,554,980,948,765,733,701, 669,637,1012,999,967,752,720,688,656,624,592,560,986,954,739,707,675,643, 1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681,649,979, 947,764,732,700,668,636,604,572,1011,998,966,751,719,687,655,985,953,738,706, 674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712,680,648, 616,584,552,1023,978,946,763,731,699,667,1010,965,718,654,590,952,705,641, 1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951, 704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665,963,716, 652,588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,975,728,664, 600,962,715,651,949,766,702,638,574,1013,753,689,1000,987,740,676,612,548, 974,727,663,961,714,650,586] [views:debug,2014-08-19T16:54:33.762,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/637. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.762,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",637,active,0} [ns_server:debug,2014-08-19T16:54:33.930,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 732. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:33.930,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 620. Nacking mccouch update. [views:debug,2014-08-19T16:54:33.930,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/732. Updated state: active (0) [views:debug,2014-08-19T16:54:33.930,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/620. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:33.930,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",732,active,0} [ns_server:debug,2014-08-19T16:54:33.930,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",620,active,0} [ns_server:debug,2014-08-19T16:54:33.930,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976,960, 944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956,940,1020,1004, 760,744,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:33.931,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662, 646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982, 966,950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012, 752,720,688,656,624,986,954,1018] [views:debug,2014-08-19T16:54:34.047,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/732. Updated state: active (0) [views:debug,2014-08-19T16:54:34.048,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/620. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:34.048,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",732,active,0} [ns_server:debug,2014-08-19T16:54:34.048,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",620,active,0} [ns_server:debug,2014-08-19T16:54:34.173,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 635. Nacking mccouch update. [views:debug,2014-08-19T16:54:34.173,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/635. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:34.173,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",635,active,0} [ns_server:debug,2014-08-19T16:54:34.174,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021, 944,761,697,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709,645, 1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657,955, 708,644,580,1019,942,759,695,1006,993,746,682,618,554,980,948,765,733,701, 669,637,1012,999,967,752,720,688,656,624,592,560,986,954,739,707,675,643, 1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681,649,979, 947,764,732,700,668,636,604,572,1011,998,966,751,719,687,655,985,953,738,706, 674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712,680,648, 616,584,552,1023,978,946,763,731,699,667,635,1010,965,718,654,590,952,705, 641,1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653, 951,704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665,963, 716,652,588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,975,728, 664,600,962,715,651,949,766,702,638,574,1013,753,689,1000,987,740,676,612, 548,974,727,663,961,714,650,586] [views:debug,2014-08-19T16:54:34.265,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/635. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:34.265,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",635,active,0} [ns_server:debug,2014-08-19T16:54:34.369,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 618. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:34.369,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 730. Nacking mccouch update. [views:debug,2014-08-19T16:54:34.369,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/618. Updated state: active (0) [views:debug,2014-08-19T16:54:34.369,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/730. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:34.369,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",618,active,0} [ns_server:debug,2014-08-19T16:54:34.369,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",730,active,0} [ns_server:debug,2014-08-19T16:54:34.369,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662, 646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982, 966,950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,618,980,948, 1012,752,720,688,656,624,986,954,1018] [ns_server:debug,2014-08-19T16:54:34.369,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976, 960,944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956,940,1020, 1004,760,744,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:34.420,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/730. Updated state: active (0) [views:debug,2014-08-19T16:54:34.420,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/618. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:34.420,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",730,active,0} [ns_server:debug,2014-08-19T16:54:34.420,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",618,active,0} [ns_server:debug,2014-08-19T16:54:34.470,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 633. Nacking mccouch update. [views:debug,2014-08-19T16:54:34.470,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/633. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:34.470,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",633,active,0} [ns_server:debug,2014-08-19T16:54:34.471,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021, 944,761,697,633,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709, 645,1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657, 955,708,644,580,1019,942,759,695,1006,993,746,682,618,554,980,948,765,733, 701,669,637,1012,999,967,752,720,688,656,624,592,560,986,954,739,707,675,643, 1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681,649,979, 947,764,732,700,668,636,604,572,1011,998,966,751,719,687,655,985,953,738,706, 674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712,680,648, 616,584,552,1023,978,946,763,731,699,667,635,1010,965,718,654,590,952,705, 641,1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653, 951,704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665,963, 716,652,588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,975,728, 664,600,962,715,651,949,766,702,638,574,1013,753,689,1000,987,740,676,612, 548,974,727,663,961,714,650,586] [views:debug,2014-08-19T16:54:34.522,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/633. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:34.522,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",633,active,0} [ns_server:debug,2014-08-19T16:54:34.589,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 616. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:34.589,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 728. Nacking mccouch update. [views:debug,2014-08-19T16:54:34.589,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/616. Updated state: active (0) [views:debug,2014-08-19T16:54:34.589,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/728. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:34.589,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",616,active,0} [ns_server:debug,2014-08-19T16:54:34.589,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",728,active,0} [ns_server:debug,2014-08-19T16:54:34.589,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662, 646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982, 966,950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734, 702,670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,618,980, 948,1012,752,720,688,656,624,986,954,1018] [ns_server:debug,2014-08-19T16:54:34.590,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,992,976, 960,944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956,940,1020, 1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:34.623,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/616. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:34.623,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",616,active,0} [views:debug,2014-08-19T16:54:34.631,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/728. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:34.631,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",728,active,0} [ns_server:debug,2014-08-19T16:54:34.673,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 631. Nacking mccouch update. [views:debug,2014-08-19T16:54:34.673,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/631. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:34.673,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",631,active,0} [ns_server:debug,2014-08-19T16:54:34.674,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021, 944,761,697,633,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709, 645,1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657, 955,708,644,580,1019,942,759,695,631,1006,993,746,682,618,554,980,948,765, 733,701,669,637,1012,999,967,752,720,688,656,624,592,560,986,954,739,707,675, 643,1018,973,941,758,726,694,662,630,598,566,1005,992,960,745,713,681,649, 979,947,764,732,700,668,636,604,572,1011,998,966,751,719,687,655,985,953,738, 706,674,642,610,578,1017,972,940,757,725,693,661,1004,991,959,744,712,680, 648,616,584,552,1023,978,946,763,731,699,667,635,1010,965,718,654,590,952, 705,641,1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717, 653,951,704,640,576,1015,938,755,691,1002,989,742,678,614,550,976,729,665, 963,716,652,588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,975, 728,664,600,962,715,651,949,766,702,638,574,1013,753,689,1000,987,740,676, 612,548,974,727,663,961,714,650,586] [views:debug,2014-08-19T16:54:34.774,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/631. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:34.774,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",631,active,0} [ns_server:debug,2014-08-19T16:54:34.899,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 726. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:34.899,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 614. Nacking mccouch update. [views:debug,2014-08-19T16:54:34.899,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/726. Updated state: active (0) [views:debug,2014-08-19T16:54:34.899,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/614. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:34.899,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",726,active,0} [ns_server:debug,2014-08-19T16:54:34.899,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",614,active,0} [ns_server:debug,2014-08-19T16:54:34.900,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,990,974,958,942,1022,1006,762,746, 730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,726,992, 976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956,940, 1020,1004,760,744,728,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:34.900,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,742,710,678,646,614,992,976, 960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950,1014, 754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702,670,638, 968,1000,740,708,676,644,974,942,1006,746,714,682,650,618,980,948,1012,752, 720,688,656,624,986,954,1018,758,726,694,662,630] [views:debug,2014-08-19T16:54:35.000,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/726. Updated state: active (0) [views:debug,2014-08-19T16:54:35.000,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/614. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.000,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",726,active,0} [ns_server:debug,2014-08-19T16:54:35.000,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",614,active,0} [ns_server:debug,2014-08-19T16:54:35.150,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 629. Nacking mccouch update. [views:debug,2014-08-19T16:54:35.150,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/629. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.150,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",629,active,0} [ns_server:debug,2014-08-19T16:54:35.151,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021, 944,761,697,633,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709, 645,1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657, 955,708,644,580,1019,942,759,695,631,1006,993,746,682,618,554,980,733,669, 999,967,752,720,688,656,624,592,560,986,954,739,707,675,643,1018,973,941,758, 726,694,662,630,598,566,1005,992,960,745,713,681,649,979,947,764,732,700,668, 636,604,572,1011,998,966,751,719,687,655,985,953,738,706,674,642,610,578, 1017,972,940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552, 1023,978,946,763,731,699,667,635,1010,965,718,654,590,952,705,641,1016,939, 756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951,704,640,576, 1015,938,755,691,1002,989,742,678,614,550,976,729,665,963,716,652,588,950, 767,703,639,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962,715, 651,949,766,702,638,574,1013,753,689,1000,987,740,676,612,548,974,727,663, 961,714,650,586,948,765,701,637,1012] [views:debug,2014-08-19T16:54:35.234,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/629. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.235,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",629,active,0} [ns_server:debug,2014-08-19T16:54:35.384,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 724. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:35.384,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 612. Nacking mccouch update. [views:debug,2014-08-19T16:54:35.384,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/724. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.385,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",724,active,0} [views:debug,2014-08-19T16:54:35.385,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/612. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.385,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",612,active,0} [ns_server:debug,2014-08-19T16:54:35.385,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006,762, 746,730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,726, 992,976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,988,972,956, 940,1020,1004,760,744,728,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:35.385,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,742,710,678,646,614,992,976, 960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950,1014, 754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702,670,638, 968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948,1012, 752,720,688,656,624,986,954,1018,758,726,694,662,630] [views:debug,2014-08-19T16:54:35.493,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/612. Updated state: active (0) [views:debug,2014-08-19T16:54:35.494,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/724. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.494,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",612,active,0} [ns_server:debug,2014-08-19T16:54:35.494,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",724,active,0} [ns_server:debug,2014-08-19T16:54:35.590,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 627. Nacking mccouch update. [views:debug,2014-08-19T16:54:35.590,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/627. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.590,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",627,active,0} [ns_server:debug,2014-08-19T16:54:35.591,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021, 944,761,697,633,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709, 645,1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657, 955,708,644,580,1019,942,759,695,631,1006,993,746,682,618,554,980,733,669, 999,967,752,720,688,656,624,592,560,986,954,739,707,675,643,1018,973,941,758, 726,694,662,630,598,566,1005,992,960,745,713,681,649,979,947,764,732,700,668, 636,604,572,1011,998,966,751,719,687,655,985,953,738,706,674,642,610,578, 1017,972,940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552, 1023,978,946,763,731,699,667,635,1010,965,718,654,590,952,705,641,1016,939, 756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,963,716,652,588, 950,767,703,639,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962, 715,651,949,766,702,638,574,1013,753,689,1000,987,740,676,612,548,974,727, 663,961,714,650,586,948,765,701,637,1012] [views:debug,2014-08-19T16:54:35.641,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/627. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.641,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",627,active,0} [ns_server:debug,2014-08-19T16:54:35.716,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 610. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:35.716,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 722. Nacking mccouch update. [views:debug,2014-08-19T16:54:35.716,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/610. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.717,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",610,active,0} [views:debug,2014-08-19T16:54:35.717,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/722. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.717,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",722,active,0} [ns_server:debug,2014-08-19T16:54:35.717,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,742,710,678,646,614,992,976, 960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950,1014, 754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702,670, 638,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948, 1012,752,720,688,656,624,986,954,1018,758,726,694,662,630] [ns_server:debug,2014-08-19T16:54:35.717,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006,762, 746,730,996,980,964,948,1012,752,736,986,970,954,938,1018,1002,758,742,726, 992,976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,722,988,972, 956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:35.758,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/610. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.758,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",610,active,0} [views:debug,2014-08-19T16:54:35.775,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/722. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.776,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",722,active,0} [ns_server:debug,2014-08-19T16:54:35.809,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 625. Nacking mccouch update. [views:debug,2014-08-19T16:54:35.809,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/625. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.809,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",625,active,0} [ns_server:debug,2014-08-19T16:54:35.810,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021, 944,761,697,633,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709, 645,1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657, 955,708,644,580,1019,942,759,695,631,1006,993,746,682,618,554,980,733,669, 999,967,752,720,688,656,624,592,560,986,954,739,707,675,643,1018,973,941,758, 726,694,662,630,598,566,1005,992,960,745,713,681,649,979,947,764,732,700,668, 636,604,572,1011,998,966,751,719,687,655,985,953,738,706,674,642,610,578, 1017,972,940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552, 1023,978,946,763,731,699,667,635,1010,965,718,654,590,952,705,641,1016,939, 756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,963,716,652,588, 950,767,703,639,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962, 715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548,974, 727,663,961,714,650,586,948,765,701,637,1012] [views:debug,2014-08-19T16:54:35.860,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/625. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.860,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",625,active,0} [ns_server:debug,2014-08-19T16:54:35.927,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 720. Nacking mccouch update. [views:debug,2014-08-19T16:54:35.927,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/720. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.927,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 608. Nacking mccouch update. [views:debug,2014-08-19T16:54:35.927,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/608. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:35.927,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",720,active,0} [ns_server:debug,2014-08-19T16:54:35.927,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",608,active,0} [ns_server:debug,2014-08-19T16:54:35.927,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006,762, 746,730,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002,758,742, 726,992,976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,722,988, 972,956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:35.927,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614,992, 976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980, 948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630] [views:debug,2014-08-19T16:54:36.002,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/608. Updated state: active (0) [views:debug,2014-08-19T16:54:36.003,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/720. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:36.003,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",608,active,0} [ns_server:debug,2014-08-19T16:54:36.003,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",720,active,0} [ns_server:debug,2014-08-19T16:54:36.114,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 623. Nacking mccouch update. [views:debug,2014-08-19T16:54:36.114,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/623. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:36.115,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",623,active,0} [ns_server:debug,2014-08-19T16:54:36.115,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,983,736,672,608,970,723,659,957,710,646,582,1021, 944,761,697,633,1008,995,748,684,620,556,982,735,671,969,722,658,594,956,709, 645,1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968,721,657, 955,708,644,580,1019,942,759,695,631,1006,993,746,682,618,554,980,733,669, 999,967,752,720,688,656,624,592,560,986,954,739,707,675,643,1018,973,941,758, 726,694,662,630,598,566,1005,992,960,745,713,681,649,979,947,764,732,700,668, 636,604,572,1011,998,966,751,719,687,655,623,985,953,738,706,674,642,610,578, 1017,972,940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552, 1023,978,946,763,731,699,667,635,1010,965,718,654,590,952,705,641,1016,939, 756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,963,716,652,588, 950,767,703,639,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962, 715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548,974, 727,663,961,714,650,586,948,765,701,637,1012] [views:debug,2014-08-19T16:54:36.203,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/623. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:36.204,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",623,active,0} [ns_server:debug,2014-08-19T16:54:36.312,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 718. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:36.312,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 606. Nacking mccouch update. [views:debug,2014-08-19T16:54:36.312,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/718. Updated state: active (0) [views:debug,2014-08-19T16:54:36.312,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/606. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:36.312,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",718,active,0} [ns_server:debug,2014-08-19T16:54:36.312,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",606,active,0} [ns_server:debug,2014-08-19T16:54:36.312,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002,758, 742,726,992,976,960,944,1008,764,748,732,998,982,966,950,1014,754,738,722, 988,972,956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:36.312,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614,992, 976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702, 670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618, 980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630] [views:debug,2014-08-19T16:54:36.504,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/606. Updated state: active (0) [views:debug,2014-08-19T16:54:36.504,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/718. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:36.504,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",606,active,0} [ns_server:debug,2014-08-19T16:54:36.505,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",718,active,0} [ns_server:debug,2014-08-19T16:54:36.638,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 621. Nacking mccouch update. [views:debug,2014-08-19T16:54:36.638,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/621. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:36.638,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",621,active,0} [ns_server:debug,2014-08-19T16:54:36.639,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,621,983,736,672,608,970,723,659,957,710,646,582, 1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,969,722,658,594, 956,709,645,1020,943,760,696,632,568,1007,994,747,683,981,734,670,606,968, 721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682,618,554,980, 733,669,999,967,752,720,688,656,624,592,560,986,954,739,707,675,643,1018,973, 941,758,726,694,662,630,598,566,1005,992,960,745,713,681,649,979,947,764,732, 700,668,636,604,572,1011,998,966,751,719,687,655,623,985,953,738,706,674,642, 610,578,1017,972,940,757,725,693,661,629,1004,991,959,744,712,680,648,616, 584,552,1023,978,946,763,731,699,667,635,1010,965,718,654,590,952,705,641, 1016,939,756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951, 704,640,576,1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,963, 716,652,588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,975,728, 664,600,962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676, 612,548,974,727,663,961,714,650,586,948,765,701,637,1012] [views:debug,2014-08-19T16:54:36.730,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/621. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:36.730,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",621,active,0} [ns_server:debug,2014-08-19T16:54:36.865,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 716. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:36.865,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 604. Nacking mccouch update. [views:debug,2014-08-19T16:54:36.865,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/716. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:36.865,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",716,active,0} [views:debug,2014-08-19T16:54:36.865,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/604. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:36.865,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",604,active,0} [ns_server:debug,2014-08-19T16:54:36.865,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002,758, 742,726,992,976,960,944,1008,764,748,732,716,998,982,966,950,1014,754,738, 722,988,972,956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:36.866,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614,976, 944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950,1014, 754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980, 948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,992,960] [views:debug,2014-08-19T16:54:36.936,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/604. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:36.936,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",604,active,0} [views:debug,2014-08-19T16:54:36.936,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/716. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:36.936,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",716,active,0} [ns_server:debug,2014-08-19T16:54:36.986,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 619. Nacking mccouch update. [views:debug,2014-08-19T16:54:36.986,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/619. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:36.986,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",619,active,0} [ns_server:debug,2014-08-19T16:54:36.987,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,621,983,736,672,608,970,723,659,957,710,646,582, 1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,969,722,658,594, 956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734,670,606, 968,721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682,618,554, 980,733,669,967,720,656,592,986,954,739,707,675,643,1018,973,941,758,726,694, 662,630,598,566,1005,992,960,745,713,681,649,979,947,764,732,700,668,636,604, 572,1011,998,966,751,719,687,655,623,985,953,738,706,674,642,610,578,1017, 972,940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552,1023, 978,946,763,731,699,667,635,1010,965,718,654,590,952,705,641,1016,939,756, 692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,963,716,652,588, 950,767,703,639,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962, 715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548,974, 727,663,961,714,650,586,948,765,701,637,1012,999,752,688,624,560] [views:debug,2014-08-19T16:54:37.037,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/619. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:37.037,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",619,active,0} [ns_server:debug,2014-08-19T16:54:37.095,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 714. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:37.095,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 602. Nacking mccouch update. [views:debug,2014-08-19T16:54:37.096,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/714. Updated state: active (0) [views:debug,2014-08-19T16:54:37.096,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/602. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:37.096,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",714,active,0} [ns_server:debug,2014-08-19T16:54:37.096,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",602,active,0} [ns_server:debug,2014-08-19T16:54:37.096,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002, 758,742,726,992,976,960,944,1008,764,748,732,716,998,982,966,950,1014,754, 738,722,988,972,956,940,1020,1004,760,744,728,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:37.096,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614, 976,944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702, 670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618, 980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,992,960] [views:debug,2014-08-19T16:54:37.146,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/714. Updated state: active (0) [views:debug,2014-08-19T16:54:37.146,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/602. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:37.146,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",714,active,0} [ns_server:debug,2014-08-19T16:54:37.146,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",602,active,0} [ns_server:debug,2014-08-19T16:54:37.198,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 617. Nacking mccouch update. [views:debug,2014-08-19T16:54:37.198,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/617. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:37.198,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",617,active,0} [ns_server:debug,2014-08-19T16:54:37.199,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,621,983,736,672,608,970,723,659,957,710,646,582, 1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,969,722,658,594, 956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734,670,606, 968,721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682,618,554, 980,733,669,967,720,656,592,986,954,739,707,675,643,1018,973,941,758,726,694, 662,630,598,566,1005,992,960,745,713,681,649,617,979,947,764,732,700,668,636, 604,572,1011,998,966,751,719,687,655,623,985,953,738,706,674,642,610,578, 1017,972,940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552, 1023,978,946,763,731,699,667,635,1010,965,718,654,590,952,705,641,1016,939, 756,692,628,564,1003,990,743,679,977,730,666,602,964,717,653,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,963,716,652,588, 950,767,703,639,1014,754,690,626,562,1001,988,741,677,975,728,664,600,962, 715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548,974, 727,663,961,714,650,586,948,765,701,637,1012,999,752,688,624,560] [views:debug,2014-08-19T16:54:37.249,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/617. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:37.249,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",617,active,0} [ns_server:debug,2014-08-19T16:54:37.424,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 712. Nacking mccouch update. [views:debug,2014-08-19T16:54:37.424,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/712. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:37.424,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 600. Nacking mccouch update. [views:debug,2014-08-19T16:54:37.424,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/600. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:37.424,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",712,active,0} [ns_server:debug,2014-08-19T16:54:37.424,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",600,active,0} [ns_server:debug,2014-08-19T16:54:37.424,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002, 758,742,726,992,976,960,944,1008,764,748,732,716,998,982,966,950,1014,754, 738,722,988,972,956,940,1020,1004,760,744,728,712,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:37.425,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614, 976,944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734, 702,670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650, 618,980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,992, 960] [views:debug,2014-08-19T16:54:37.533,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/712. Updated state: active (0) [views:debug,2014-08-19T16:54:37.533,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/600. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:37.533,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",712,active,0} [ns_server:debug,2014-08-19T16:54:37.533,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",600,active,0} [ns_server:debug,2014-08-19T16:54:37.650,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 615. Nacking mccouch update. [views:debug,2014-08-19T16:54:37.650,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/615. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:37.650,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",615,active,0} [ns_server:debug,2014-08-19T16:54:37.651,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,621,983,736,672,608,970,723,659,957,710,646,582, 1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,969,722,658,594, 956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734,670,606, 968,721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682,618,554, 980,733,669,967,720,656,592,986,954,739,707,675,643,1018,973,941,758,726,694, 662,630,598,566,1005,992,960,745,713,681,649,617,979,947,764,732,700,668,636, 604,572,1011,998,966,751,719,687,655,623,985,953,738,706,674,642,610,578, 1017,972,940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552, 1023,978,946,763,731,699,667,635,1010,965,718,654,590,952,705,641,1016,939, 756,692,628,564,1003,990,743,679,615,977,730,666,602,964,717,653,951,704,640, 576,1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,963,716,652, 588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,975,728,664,600, 962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548, 974,727,663,961,714,650,586,948,765,701,637,1012,999,752,688,624,560] [views:debug,2014-08-19T16:54:37.767,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/615. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:37.768,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",615,active,0} [ns_server:debug,2014-08-19T16:54:37.934,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 598. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:37.934,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 710. Nacking mccouch update. [views:debug,2014-08-19T16:54:37.935,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/598. Updated state: active (0) [views:debug,2014-08-19T16:54:37.935,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/710. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:37.935,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",598,active,0} [ns_server:debug,2014-08-19T16:54:37.935,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",710,active,0} [ns_server:debug,2014-08-19T16:54:37.935,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,990,974,958,942,1022,1006, 762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018,1002, 758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982,966,950,1014, 754,738,722,988,972,956,940,1020,1004,760,744,728,712,1023,994,978,962,946, 1010] [ns_server:debug,2014-08-19T16:54:37.935,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614, 976,944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734, 702,670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650, 618,980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,598, 992,960] [views:debug,2014-08-19T16:54:38.043,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/710. Updated state: active (0) [views:debug,2014-08-19T16:54:38.043,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/598. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.044,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",710,active,0} [ns_server:debug,2014-08-19T16:54:38.044,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",598,active,0} [ns_server:debug,2014-08-19T16:54:38.148,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 613. Nacking mccouch update. [views:debug,2014-08-19T16:54:38.148,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/613. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.148,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",613,active,0} [ns_server:debug,2014-08-19T16:54:38.149,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,621,983,736,672,608,970,723,659,957,710,646,582, 1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,969,722,658,594, 956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734,670,606, 968,721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682,618,554, 980,733,669,967,720,656,592,986,954,739,707,675,643,1018,973,941,758,726,694, 662,630,598,566,1005,992,960,745,713,681,649,617,979,947,764,732,700,668,636, 604,572,1011,998,966,751,719,687,655,623,985,953,738,706,674,642,610,578, 1017,972,940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552, 1023,978,946,763,731,699,667,635,1010,965,718,654,590,952,705,641,1016,939, 756,692,628,564,1003,990,743,679,615,977,730,666,602,964,717,653,951,704,640, 576,1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,963,716,652, 588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728,664, 600,962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612, 548,974,727,663,961,714,650,586,948,765,701,637,1012,999,752,688,624,560] [views:debug,2014-08-19T16:54:38.199,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/613. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.199,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",613,active,0} [ns_server:debug,2014-08-19T16:54:38.266,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 708. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:38.266,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 596. Nacking mccouch update. [views:debug,2014-08-19T16:54:38.266,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/708. Updated state: active (0) [views:debug,2014-08-19T16:54:38.266,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/596. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.266,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",708,active,0} [ns_server:debug,2014-08-19T16:54:38.266,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",596,active,0} [ns_server:debug,2014-08-19T16:54:38.266,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,708,990,974,958,942,1022, 1006,762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018, 1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982,966,950, 1014,754,738,722,988,972,956,940,1020,1004,760,744,728,712,1023,994,978,962, 946,1010] [ns_server:debug,2014-08-19T16:54:38.267,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,596,990,958,1022,762, 730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646, 614,976,944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734, 702,670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650, 618,980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,598, 992,960] [views:debug,2014-08-19T16:54:38.300,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/708. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.300,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",708,active,0} [views:debug,2014-08-19T16:54:38.317,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/596. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.317,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",596,active,0} [ns_server:debug,2014-08-19T16:54:38.350,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 611. Nacking mccouch update. [views:debug,2014-08-19T16:54:38.350,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/611. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.350,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",611,active,0} [ns_server:debug,2014-08-19T16:54:38.351,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,971,724,660,596,958,711,647,1022,945,762,698, 634,570,1009,996,749,685,621,983,736,672,608,970,723,659,957,710,646,582, 1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,969,722,658,594, 956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734,670,606, 968,721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682,618,554, 980,733,669,967,720,656,592,986,954,739,707,675,643,611,1018,973,941,758,726, 694,662,630,598,566,1005,992,960,745,713,681,649,617,979,947,764,732,700,668, 636,604,572,1011,998,966,751,719,687,655,623,985,953,738,706,674,642,610,578, 1017,972,940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552, 1023,978,946,763,731,699,667,635,1010,965,718,654,590,952,705,641,1016,939, 756,692,628,564,1003,990,743,679,615,977,730,666,602,964,717,653,951,704,640, 576,1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,963,716,652, 588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728,664, 600,962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612, 548,974,727,663,961,714,650,586,948,765,701,637,1012,999,752,688,624,560] [views:debug,2014-08-19T16:54:38.401,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/611. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.401,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",611,active,0} [ns_server:debug,2014-08-19T16:54:38.468,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 706. Nacking mccouch update. [views:debug,2014-08-19T16:54:38.468,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/706. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.468,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",706,active,0} [ns_server:debug,2014-08-19T16:54:38.468,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,708,990,974,958,942,1022, 1006,762,746,730,714,996,980,964,948,1012,752,736,720,986,970,954,938,1018, 1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982,966,950, 1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712,1023,994,978, 962,946,1010] [ns_server:debug,2014-08-19T16:54:38.485,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 594. Nacking mccouch update. [views:debug,2014-08-19T16:54:38.485,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/594. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.485,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",594,active,0} [ns_server:debug,2014-08-19T16:54:38.485,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,596,990,958,1022,762, 730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646, 614,976,944,1008,748,716,684,652,620,998,982,966,950,1014,754,738,722,706, 690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712,696, 680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670,638,606, 968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948,1012, 752,720,688,656,624,986,954,1018,758,726,694,662,630,598,992,960,764,732,700, 668,636,604] [views:debug,2014-08-19T16:54:38.552,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/706. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.553,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",706,active,0} [ns_server:debug,2014-08-19T16:54:38.585,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 609. Nacking mccouch update. [views:debug,2014-08-19T16:54:38.586,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/609. Updated state: active (0) [views:debug,2014-08-19T16:54:38.586,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/594. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.586,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",609,active,0} [ns_server:debug,2014-08-19T16:54:38.586,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",594,active,0} [ns_server:debug,2014-08-19T16:54:38.586,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,1022,945,762, 698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,957,710,646,582, 1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,969,722,658,594, 956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734,670,606, 968,721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682,618,554, 980,733,669,967,720,656,592,954,707,643,1018,973,941,758,726,694,662,630,598, 566,1005,992,960,745,713,681,649,617,979,947,764,732,700,668,636,604,572, 1011,998,966,751,719,687,655,623,985,953,738,706,674,642,610,578,1017,972, 940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552,1023,978, 946,763,731,699,667,635,1010,965,718,654,590,952,705,641,1016,939,756,692, 628,564,1003,990,743,679,615,977,730,666,602,964,717,653,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,963,716,652,588, 950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600, 962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548, 974,727,663,961,714,650,586,948,765,701,637,1012,999,752,688,624,560,986,739, 675,611] [views:debug,2014-08-19T16:54:38.680,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/609. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.681,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",609,active,0} [ns_server:debug,2014-08-19T16:54:38.829,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 592. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:38.829,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 704. Nacking mccouch update. [views:debug,2014-08-19T16:54:38.829,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/592. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.829,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",592,active,0} [views:debug,2014-08-19T16:54:38.829,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/704. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.829,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",704,active,0} [ns_server:debug,2014-08-19T16:54:38.829,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,984,968,952,1016,1000,756,740,724,708,990,974,958,942,1022, 1006,762,746,730,714,996,980,964,948,1012,752,736,720,704,986,970,954,938, 1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982,966, 950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712,1023,994, 978,962,946,1010] [ns_server:debug,2014-08-19T16:54:38.829,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,596,990,958,1022,762, 730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646, 614,976,944,1008,748,716,684,652,620,998,982,966,950,1014,754,738,722,706, 690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712,696, 680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670,638,606, 968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948,1012, 752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992,960,764,732, 700,668,636,604] [ns_server:debug,2014-08-19T16:54:38.921,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 607. Nacking mccouch update. [views:debug,2014-08-19T16:54:38.921,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/607. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.921,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",607,active,0} [views:debug,2014-08-19T16:54:38.921,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/592. Updated state: active (0) [views:debug,2014-08-19T16:54:38.921,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/704. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:38.921,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",592,active,0} [ns_server:debug,2014-08-19T16:54:38.921,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",704,active,0} [ns_server:debug,2014-08-19T16:54:38.922,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,1022,945,762, 698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,957,710,646,582, 1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969,722,658, 594,956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734,670, 606,968,721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682,618, 554,980,733,669,967,720,656,592,954,707,643,1018,973,941,758,726,694,662,630, 598,566,1005,992,960,745,713,681,649,617,979,947,764,732,700,668,636,604,572, 1011,998,966,751,719,687,655,623,985,953,738,706,674,642,610,578,1017,972, 940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552,1023,978, 946,763,731,699,667,635,1010,965,718,654,590,952,705,641,1016,939,756,692, 628,564,1003,990,743,679,615,977,730,666,602,964,717,653,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,963,716,652,588, 950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600, 962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548, 974,727,663,961,714,650,586,948,765,701,637,1012,999,752,688,624,560,986,739, 675,611] [views:debug,2014-08-19T16:54:39.063,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/607. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.063,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",607,active,0} [ns_server:debug,2014-08-19T16:54:39.256,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 590. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:39.256,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 702. Nacking mccouch update. [views:debug,2014-08-19T16:54:39.256,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/590. Updated state: active (0) [views:debug,2014-08-19T16:54:39.256,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/702. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.256,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",590,active,0} [ns_server:debug,2014-08-19T16:54:39.256,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",702,active,0} [ns_server:debug,2014-08-19T16:54:39.256,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,702,984,968,952,1016,1000,756,740,724,708,990,974,958,942, 1022,1006,762,746,730,714,996,980,964,948,1012,752,736,720,704,986,970,954, 938,1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,998,982, 966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712,1023, 994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:39.256,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,976,944,1008,748,716,684,652,620,998,982,966,950,1014,754,738,722, 706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712, 696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670,638, 606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948, 1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992,960, 764,732,700,668,636,604] [ns_server:debug,2014-08-19T16:54:39.327,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 605. Nacking mccouch update. [views:debug,2014-08-19T16:54:39.327,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/605. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.327,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",605,active,0} [ns_server:debug,2014-08-19T16:54:39.328,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,1022,945,762, 698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,957,710,646,582, 1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969,722,658, 594,956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734,670, 606,968,721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682,618, 554,980,733,669,605,967,720,656,592,954,707,643,1018,973,941,758,726,694,662, 630,598,566,1005,992,960,745,713,681,649,617,979,947,764,732,700,668,636,604, 572,1011,998,966,751,719,687,655,623,985,953,738,706,674,642,610,578,1017, 972,940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552,1023, 978,946,763,731,699,667,635,1010,965,718,654,590,952,705,641,1016,939,756, 692,628,564,1003,990,743,679,615,977,730,666,602,964,717,653,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,963,716,652,588, 950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600, 962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548, 974,727,663,961,714,650,586,948,765,701,637,1012,999,752,688,624,560,986,739, 675,611] [views:debug,2014-08-19T16:54:39.344,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/590. Updated state: active (0) [views:debug,2014-08-19T16:54:39.345,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/702. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.345,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",590,active,0} [ns_server:debug,2014-08-19T16:54:39.345,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",702,active,0} [views:debug,2014-08-19T16:54:39.395,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/605. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.395,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",605,active,0} [ns_server:debug,2014-08-19T16:54:39.495,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 588. Nacking mccouch update. [views:debug,2014-08-19T16:54:39.496,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/588. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.496,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",588,active,0} [ns_server:debug,2014-08-19T16:54:39.496,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,976,944,1008,748,716,684,652,620,588,998,982,966,950,1014,754,738, 722,706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980, 948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992, 960,764,732,700,668,636,604] [ns_server:debug,2014-08-19T16:54:39.512,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 700. Nacking mccouch update. [views:debug,2014-08-19T16:54:39.512,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/700. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.512,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",700,active,0} [ns_server:debug,2014-08-19T16:54:39.513,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,702,984,968,952,1016,1000,756,740,724,708,990,974,958,942, 1022,1006,762,746,730,714,996,980,964,948,1012,752,736,720,704,986,970,954, 938,1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,700,998, 982,966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712, 1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:39.529,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 603. Nacking mccouch update. [views:debug,2014-08-19T16:54:39.529,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/603. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.529,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",603,active,0} [ns_server:debug,2014-08-19T16:54:39.530,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,1022,945,762, 698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,957,710,646,582, 1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969,722,658, 594,956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734,670, 606,968,721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682,618, 554,980,733,669,605,967,720,656,592,954,707,643,1018,973,941,758,726,694,662, 630,598,566,1005,992,960,745,713,681,649,617,979,947,764,732,700,668,636,604, 572,1011,998,966,751,719,687,655,623,985,953,738,706,674,642,610,578,1017, 972,940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552,1023, 978,946,763,731,699,667,635,603,1010,965,718,654,590,952,705,641,1016,939, 756,692,628,564,1003,990,743,679,615,977,730,666,602,964,717,653,951,704,640, 576,1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,963,716,652, 588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728,664, 600,962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612, 548,974,727,663,961,714,650,586,948,765,701,637,1012,999,752,688,624,560,986, 739,675,611] [views:debug,2014-08-19T16:54:39.563,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/588. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.563,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",588,active,0} [views:debug,2014-08-19T16:54:39.580,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/700. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.580,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",700,active,0} [views:debug,2014-08-19T16:54:39.597,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/603. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.597,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",603,active,0} [ns_server:debug,2014-08-19T16:54:39.799,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 586. Nacking mccouch update. [views:debug,2014-08-19T16:54:39.799,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/586. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.799,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",586,active,0} [ns_server:debug,2014-08-19T16:54:39.800,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,976,944,1008,748,716,684,652,620,588,998,982,966,950,1014,754,738, 722,706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 992,960,764,732,700,668,636,604] [ns_server:debug,2014-08-19T16:54:39.874,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 698. Nacking mccouch update. [views:debug,2014-08-19T16:54:39.874,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/698. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.875,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",698,active,0} [ns_server:debug,2014-08-19T16:54:39.875,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,702,984,968,952,1016,1000,756,740,724,708,990,974,958,942, 1022,1006,762,746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970, 954,938,1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,700, 998,982,966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728, 712,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:54:39.906,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 601. Nacking mccouch update. [views:debug,2014-08-19T16:54:39.907,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/601. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.907,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",601,active,0} [ns_server:debug,2014-08-19T16:54:39.908,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,1022,945,762, 698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,957,710,646,582, 1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969,722,658, 594,956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734,670, 606,968,721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682,618, 554,980,733,669,605,967,720,656,592,954,707,643,1018,973,941,758,726,694,662, 630,598,566,1005,992,960,745,713,681,649,617,979,947,764,732,700,668,636,604, 572,1011,998,966,751,719,687,655,623,985,953,738,706,674,642,610,578,1017, 972,940,757,725,693,661,629,1004,991,959,744,712,680,648,616,584,552,1023, 978,946,763,731,699,667,635,603,1010,965,718,654,590,952,705,641,1016,939, 756,692,628,564,1003,990,743,679,615,977,730,666,602,964,717,653,951,704,640, 576,1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,601,963,716, 652,588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728, 664,600,962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676, 612,548,974,727,663,961,714,650,586,948,765,701,637,1012,999,752,688,624,560, 986,739,675,611] [views:debug,2014-08-19T16:54:39.942,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/586. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:39.942,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",586,active,0} [views:debug,2014-08-19T16:54:40.001,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/698. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.001,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",698,active,0} [views:debug,2014-08-19T16:54:40.076,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/601. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.076,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",601,active,0} [ns_server:debug,2014-08-19T16:54:40.235,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 584. Nacking mccouch update. [views:debug,2014-08-19T16:54:40.235,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/584. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.235,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",584,active,0} [ns_server:debug,2014-08-19T16:54:40.235,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738,722,706, 690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712,696, 680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734,702,670,638, 606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,586,980, 948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992, 960,764,732,700,668,636,604,998,966] [ns_server:debug,2014-08-19T16:54:40.276,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 696. Nacking mccouch update. [views:debug,2014-08-19T16:54:40.277,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/696. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.277,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",696,active,0} [ns_server:debug,2014-08-19T16:54:40.277,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [766,750,734,718,702,984,968,952,1016,1000,756,740,724,708,990,974,958,942, 1022,1006,762,746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970, 954,938,1018,1002,758,742,726,710,992,976,960,944,1008,764,748,732,716,700, 998,982,966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728, 712,696,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:54:40.327,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/584. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.327,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",584,active,0} [ns_server:debug,2014-08-19T16:54:40.419,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 599. Nacking mccouch update. [views:debug,2014-08-19T16:54:40.419,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/599. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.419,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",599,active,0} [views:debug,2014-08-19T16:54:40.419,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/696. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.419,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",696,active,0} [ns_server:debug,2014-08-19T16:54:40.420,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,1022,945,762, 698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,957,710,646,582, 1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969,722,658, 594,956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734,670, 606,968,721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682,618, 554,980,733,669,605,967,720,656,592,954,707,643,1018,941,758,694,630,566, 1005,992,960,745,713,681,649,617,979,947,764,732,700,668,636,604,572,1011, 998,966,751,719,687,655,623,985,953,738,706,674,642,610,578,1017,972,940,757, 725,693,661,629,1004,991,959,744,712,680,648,616,584,552,1023,978,946,763, 731,699,667,635,603,1010,965,718,654,590,952,705,641,1016,939,756,692,628, 564,1003,990,743,679,615,977,730,666,602,964,717,653,951,704,640,576,1015, 938,755,691,627,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950, 767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962, 715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548,974, 727,663,599,961,714,650,586,948,765,701,637,1012,999,752,688,624,560,986,739, 675,611,973,726,662,598] [views:debug,2014-08-19T16:54:40.486,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/599. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.486,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",599,active,0} [ns_server:debug,2014-08-19T16:54:40.532,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 582. Nacking mccouch update. [views:debug,2014-08-19T16:54:40.533,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/582. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.533,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",582,active,0} [ns_server:debug,2014-08-19T16:54:40.533,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738,722, 706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712, 696,680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 992,960,764,732,700,668,636,604,998,966] [ns_server:debug,2014-08-19T16:54:40.583,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 694. Nacking mccouch update. 
[views:debug,2014-08-19T16:54:40.583,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/694. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.583,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",694,active,0} [ns_server:debug,2014-08-19T16:54:40.583,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,968,952,1016,1000,756,740,724,708,990,974,958,942,1022,1006,762, 746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970,954,938,1018, 1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,998,982, 966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712,696, 1023,994,978,962,946,1010,766,734,702] [views:debug,2014-08-19T16:54:40.600,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/582. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.600,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",582,active,0} [ns_server:debug,2014-08-19T16:54:40.650,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 597. Nacking mccouch update. [views:debug,2014-08-19T16:54:40.650,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/597. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.651,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",597,active,0} [views:debug,2014-08-19T16:54:40.651,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/694. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.651,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",694,active,0} [ns_server:debug,2014-08-19T16:54:40.651,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,1022,945,762, 698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,957,710,646,582, 1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969,722,658, 594,956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734,670, 606,968,721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682,618, 554,980,733,669,605,967,720,656,592,954,707,643,1018,941,758,694,630,566, 1005,992,960,745,713,681,649,617,979,947,764,732,700,668,636,604,572,1011, 998,966,751,719,687,655,623,985,953,738,706,674,642,610,578,1017,972,940,757, 725,693,661,629,597,1004,991,959,744,712,680,648,616,584,552,1023,978,946, 763,731,699,667,635,603,1010,965,718,654,590,952,705,641,1016,939,756,692, 628,564,1003,990,743,679,615,977,730,666,602,964,717,653,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,601,963,716,652, 588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728,664, 600,962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612, 548,974,727,663,599,961,714,650,586,948,765,701,637,1012,999,752,688,624,560, 986,739,675,611,973,726,662,598] [views:debug,2014-08-19T16:54:40.718,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/597. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.718,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",597,active,0} [ns_server:debug,2014-08-19T16:54:40.743,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 580. Nacking mccouch update. [views:debug,2014-08-19T16:54:40.743,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/580. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.743,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",580,active,0} [ns_server:debug,2014-08-19T16:54:40.743,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738,722, 706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712, 696,680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618, 586,980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630, 598,992,960,764,732,700,668,636,604,998,966] [ns_server:debug,2014-08-19T16:54:40.819,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 692. Nacking mccouch update. 
[views:debug,2014-08-19T16:54:40.819,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/692. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.819,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",692,active,0} [ns_server:debug,2014-08-19T16:54:40.819,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,968,952,1016,1000,756,740,724,708,692,990,974,958,942,1022,1006, 762,746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,998, 982,966,950,1014,754,738,722,706,988,972,956,940,1020,1004,760,744,728,712, 696,1023,994,978,962,946,1010,766,734,702] [views:debug,2014-08-19T16:54:40.820,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/580. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.820,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",580,active,0} [views:debug,2014-08-19T16:54:40.961,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/692. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:40.962,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",692,active,0} [ns_server:debug,2014-08-19T16:54:41.020,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 595. Nacking mccouch update. [views:debug,2014-08-19T16:54:41.020,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/595. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:41.020,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",595,active,0} [ns_server:debug,2014-08-19T16:54:41.021,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,1022,945,762, 698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710,646, 582,1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969,722, 658,594,956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734, 670,606,968,721,657,955,708,644,580,1019,942,759,695,631,1006,993,746,682, 618,554,980,733,669,605,967,720,656,592,954,707,643,1018,941,758,694,630,566, 1005,992,960,745,713,681,649,617,979,947,764,732,700,668,636,604,572,1011, 998,966,751,719,687,655,623,985,953,738,706,674,642,610,578,1017,972,940,757, 725,693,661,629,597,1004,991,959,744,712,680,648,616,584,552,1023,978,946, 763,731,699,667,635,603,1010,965,718,654,590,952,705,641,1016,939,756,692, 628,564,1003,990,743,679,615,977,730,666,602,964,717,653,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,601,963,716,652, 588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728,664, 600,962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612, 548,974,727,663,599,961,714,650,586,948,765,701,637,1012,999,752,688,624,560, 986,739,675,611,973,726,662,598] [views:debug,2014-08-19T16:54:41.187,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/595. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:41.187,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",595,active,0} [ns_server:debug,2014-08-19T16:54:41.229,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 578. Nacking mccouch update. [views:debug,2014-08-19T16:54:41.229,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/578. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:41.229,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",578,active,0} [ns_server:debug,2014-08-19T16:54:41.229,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738,722, 706,690,674,658,642,626,610,594,578,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734,702, 670,638,606,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650, 618,586,980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662, 630,598,992,960,764,732,700,668,636,604,998,966] [ns_server:debug,2014-08-19T16:54:41.312,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 690. Nacking mccouch update. 
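The records above repeat the same three-step pattern for one vbucket after another: mc_connection writes a `_local/vbuuid` document and nacks the mccouch update, capi_set_view_manager receives the corresponding set_vbucket event, and the bucket's "Usable vbuckets" list grows by one. A minimal log-scraping sketch (Python; the file path is a placeholder and the regexes are written only against the line formats visible in this capture) that tallies this activity per bucket:

```python
import re
from collections import Counter

# Patterns matching the record formats visible in this log:
#   views:debug  "Got set_vbucket event for <bucket>/<vb>."
#   ns_server:debug "Added _local/vbuuid document into vb: <vb>. Nacking mccouch update."
SET_VBUCKET = re.compile(r"Got set_vbucket event for (\w+)/(\d+)\.")
VBUUID_NACK = re.compile(r"Added _local/vbuuid document into vb: (\d+)\. Nacking mccouch update\.")

def summarize(path):
    """Count set_vbucket events per bucket and _local/vbuuid writes overall."""
    events = Counter()
    vbuuid_writes = 0
    with open(path, errors="replace") as f:
        for line in f:
            m = SET_VBUCKET.search(line)
            if m:
                events[m.group(1)] += 1
            if VBUUID_NACK.search(line):
                vbuuid_writes += 1
    return events, vbuuid_writes

if __name__ == "__main__":
    # "debug.log" is an assumed filename, not taken from this capture.
    per_bucket, nacks = summarize("debug.log")
    for bucket, n in per_bucket.most_common():
        print(f"{bucket}: {n} set_vbucket events")
    print(f"_local/vbuuid writes (nacked mccouch updates): {nacks}")
```

Note that in this capture each activated vbucket appears to produce two "Got set_vbucket event" lines (one when the update is nacked, one when the event is re-signaled), so the per-bucket counts from this sketch are roughly twice the number of vbuckets that transitioned to active.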
[views:debug,2014-08-19T16:54:41.313,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/690. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:41.313,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",690,active,0} [ns_server:debug,2014-08-19T16:54:41.313,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,968,952,1016,1000,756,740,724,708,692,990,974,958,942,1022,1006, 762,746,730,714,698,996,980,964,948,1012,752,736,720,704,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,998, 982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744,728, 712,696,1023,994,978,962,946,1010,766,734,702] [views:debug,2014-08-19T16:54:41.330,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/578. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:41.330,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",578,active,0} [views:debug,2014-08-19T16:54:41.455,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/690. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:41.455,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",690,active,0} [ns_server:debug,2014-08-19T16:54:41.514,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 593. Nacking mccouch update. [views:debug,2014-08-19T16:54:41.514,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/593. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:41.514,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",593,active,0} [ns_server:debug,2014-08-19T16:54:41.515,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,1022,945,762, 698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710,646, 582,1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969,722, 658,594,956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734, 670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,1006,993,746, 682,618,554,980,733,669,605,967,720,656,592,954,707,643,1018,941,758,694,630, 566,1005,992,960,745,713,681,649,617,979,947,764,732,700,668,636,604,572, 1011,998,966,751,719,687,655,623,985,953,738,706,674,642,610,578,1017,972, 940,757,725,693,661,629,597,1004,991,959,744,712,680,648,616,584,552,1023, 978,946,763,731,699,667,635,603,1010,965,718,654,590,952,705,641,1016,939, 756,692,628,564,1003,990,743,679,615,977,730,666,602,964,717,653,951,704,640, 576,1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,601,963,716, 652,588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728, 664,600,962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676, 612,548,974,727,663,599,961,714,650,586,948,765,701,637,1012,999,752,688,624, 560,986,739,675,611,973,726,662,598] [views:debug,2014-08-19T16:54:41.619,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/593. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:41.619,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",593,active,0} [ns_server:debug,2014-08-19T16:54:41.686,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 688. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:41.686,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 576. Nacking mccouch update. [views:debug,2014-08-19T16:54:41.686,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/688. Updated state: active (0) [views:debug,2014-08-19T16:54:41.686,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/576. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:41.686,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",688,active,0} [ns_server:debug,2014-08-19T16:54:41.686,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",576,active,0} [ns_server:debug,2014-08-19T16:54:41.686,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,968,952,1016,1000,756,740,724,708,692,990,974,958,942,1022,1006, 762,746,730,714,698,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,998, 982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744,728, 712,696,1023,994,978,962,946,1010,766,734,702] [ns_server:debug,2014-08-19T16:54:41.686,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,576,970,938,1002,742,710, 678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738, 722,706,690,674,658,642,626,610,594,578,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734, 702,670,638,606,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682, 650,618,586,980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694, 662,630,598,992,960,764,732,700,668,636,604,998,966] [views:debug,2014-08-19T16:54:41.736,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/688. Updated state: active (0) [views:debug,2014-08-19T16:54:41.736,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/576. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:41.736,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",688,active,0} [ns_server:debug,2014-08-19T16:54:41.737,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",576,active,0} [ns_server:debug,2014-08-19T16:54:41.795,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 591. Nacking mccouch update. [views:debug,2014-08-19T16:54:41.795,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/591. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:41.795,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",591,active,0} [ns_server:debug,2014-08-19T16:54:41.796,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,1022,945,762, 698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710,646, 582,1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969,722, 658,594,956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734, 670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,1006,993,746, 682,618,554,980,733,669,605,967,720,656,592,954,707,643,1018,941,758,694,630, 566,1005,992,960,745,713,681,649,617,979,947,764,732,700,668,636,604,572, 1011,998,966,751,719,687,655,623,591,985,953,738,706,674,642,610,578,1017, 972,940,757,725,693,661,629,597,1004,991,959,744,712,680,648,616,584,552, 1023,978,946,763,731,699,667,635,603,1010,965,718,654,590,952,705,641,1016, 939,756,692,628,564,1003,990,743,679,615,977,730,666,602,964,717,653,951,704, 640,576,1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,601,963, 716,652,588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975, 728,664,600,962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740, 676,612,548,974,727,663,599,961,714,650,586,948,765,701,637,1012,999,752,688, 624,560,986,739,675,611,973,726,662,598] [views:debug,2014-08-19T16:54:41.846,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/591. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:41.846,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",591,active,0} [ns_server:debug,2014-08-19T16:54:41.946,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 686. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:41.946,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 574. Nacking mccouch update. [views:debug,2014-08-19T16:54:41.946,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/686. Updated state: active (0) [views:debug,2014-08-19T16:54:41.946,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/574. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:41.946,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",686,active,0} [ns_server:debug,2014-08-19T16:54:41.946,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",574,active,0} [ns_server:debug,2014-08-19T16:54:41.947,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,968,952,1016,1000,756,740,724,708,692,990,974,958,942,1022, 1006,762,746,730,714,698,996,980,964,948,1012,752,736,720,704,688,986,970, 954,938,1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716, 700,998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760, 744,728,712,696,1023,994,978,962,946,1010,766,734,702] [ns_server:debug,2014-08-19T16:54:41.947,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,576,970,938,1002,742,710, 678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,722, 690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664,648, 632,616,600,584,1023,994,978,962,946,1010,766,734,702,670,638,606,574,968, 1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586,980,948, 1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992,960, 764,732,700,668,636,604,998,966,738,706,674,642,610,578] [views:debug,2014-08-19T16:54:42.005,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/686. Updated state: active (0) [views:debug,2014-08-19T16:54:42.005,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/574. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.005,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",686,active,0} [ns_server:debug,2014-08-19T16:54:42.006,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",574,active,0} [ns_server:debug,2014-08-19T16:54:42.038,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 589. Nacking mccouch update. [views:debug,2014-08-19T16:54:42.038,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/589. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.038,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",589,active,0} [ns_server:debug,2014-08-19T16:54:42.039,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,1022,945,762, 698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710,646, 582,1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969,722, 658,594,956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734, 670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,1006,993,746, 682,618,554,980,733,669,605,967,720,656,592,954,707,643,1018,941,758,694,630, 566,1005,992,745,681,617,979,947,764,732,700,668,636,604,572,1011,998,966, 751,719,687,655,623,591,985,953,738,706,674,642,610,578,1017,972,940,757,725, 693,661,629,597,1004,991,959,744,712,680,648,616,584,552,1023,978,946,763, 731,699,667,635,603,1010,965,718,654,590,952,705,641,1016,939,756,692,628, 564,1003,990,743,679,615,977,730,666,602,964,717,653,589,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,601,963,716,652, 588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728,664, 600,962,715,651,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612, 548,974,727,663,599,961,714,650,586,948,765,701,637,1012,999,752,688,624,560, 986,739,675,611,973,726,662,598,960,713,649] [ns_server:debug,2014-08-19T16:54:42.083,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"tiles">>,<<"maps_1_8_tiles">>,<<"maps_1_8_metahash">>,<<"default">>] [ns_server:info,2014-08-19T16:54:42.088,ns_1@10.242.238.90:<0.10013.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `tiles` since at least database `tiles/100` seems to be missing. [ns_server:info,2014-08-19T16:54:42.093,ns_1@10.242.238.90:<0.10014.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `maps_1_8_tiles` since at least database `maps_1_8_tiles/100` seems to be missing. [ns_server:info,2014-08-19T16:54:42.096,ns_1@10.242.238.90:<0.10015.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `maps_1_8_metahash` since at least database `maps_1_8_metahash/100` seems to be missing. [ns_server:info,2014-08-19T16:54:42.101,ns_1@10.242.238.90:<0.10016.1>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning up indexes for bucket `default` [ns_server:info,2014-08-19T16:54:42.102,ns_1@10.242.238.90:<0.10016.1>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:54:42.107,ns_1@10.242.238.90:<0.10019.1>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 70794, disk size is 10529838 [ns_server:debug,2014-08-19T16:54:42.107,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:54:42.107,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [views:debug,2014-08-19T16:54:42.167,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/589. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.167,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",589,active,0} [ns_server:debug,2014-08-19T16:54:42.282,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 684. Nacking mccouch update. [views:debug,2014-08-19T16:54:42.282,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/684. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.282,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",684,active,0} [ns_server:debug,2014-08-19T16:54:42.282,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,990,974,958,942,1022,1006,762, 746,730,714,698,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,684, 998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744, 728,712,696,1023,994,978,962,946,1010,766,734,702,968,1000] [ns_server:debug,2014-08-19T16:54:42.332,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 572. Nacking mccouch update. [views:debug,2014-08-19T16:54:42.332,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/572. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.332,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",572,active,0} [ns_server:debug,2014-08-19T16:54:42.332,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,576,970,938,1002,742,710, 678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,722, 690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664,648, 632,616,600,584,1023,994,978,962,946,1010,766,734,702,670,638,606,574,968, 1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586,980,948, 1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992,960, 764,732,700,668,636,604,572,998,966,738,706,674,642,610,578] [views:debug,2014-08-19T16:54:42.436,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/684. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.437,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",684,active,0} [views:debug,2014-08-19T16:54:42.484,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/572. 
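The compaction_daemon records just above show the inputs to its decision for the `default` bucket: a 30% database/view fragmentation threshold and a data size of 70,794 bytes against a disk size of 10,529,838 bytes (the other three buckets are skipped because a `<bucket>/100` database appears to be missing). As a rough illustration only, assuming fragmentation is the share of the on-disk file not occupied by live data (the real bucket_needs_compaction check in ns_server may apply additional guards, such as minimum file sizes, that this sketch ignores), the threshold comparison implied by those numbers looks like:

```python
def fragmentation_pct(data_size, disk_size):
    """Share of the on-disk file not occupied by live data, as a percentage."""
    if disk_size <= 0:
        return 0.0
    return (disk_size - data_size) / disk_size * 100

# Values copied from the compaction_daemon records above.
data_size = 70794
disk_size = 10529838
threshold_pct = 30  # {database_fragmentation_threshold,{30,undefined}}

frag = fragmentation_pct(data_size, disk_size)
print(f"fragmentation = {frag:.1f}% (threshold {threshold_pct}%)")
print("exceeds threshold:", frag > threshold_pct)
```

With these numbers the computed ratio is about 99.3%, well over the 30% threshold; the daemon nonetheless reports "Finished compaction iteration" within the same millisecond and reschedules in 30s, so either compacting this small bucket completed essentially instantly or the real check involves conditions beyond this ratio. The sketch is only meant to make the logged quantities concrete.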
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.484,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",572,active,0} [ns_server:debug,2014-08-19T16:54:42.517,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 587. Nacking mccouch update. [views:debug,2014-08-19T16:54:42.517,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/587. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.518,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",587,active,0} [ns_server:debug,2014-08-19T16:54:42.518,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,1022,945,762, 698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710,646, 582,1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969,722, 658,594,956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734, 670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,1006,993,746, 682,618,554,980,733,669,605,967,720,656,592,954,707,643,1018,941,758,694,630, 566,1005,992,745,681,617,979,947,764,732,700,668,636,604,572,1011,998,966, 751,719,687,655,623,591,985,953,738,706,674,642,610,578,1017,972,940,757,725, 693,661,629,597,1004,991,959,744,712,680,648,616,584,552,1023,978,946,763, 731,699,667,635,603,1010,965,718,654,590,952,705,641,1016,939,756,692,628, 564,1003,990,743,679,615,977,730,666,602,964,717,653,589,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,601,963,716,652, 588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728,664, 600,962,715,651,587,949,766,702,638,574,1013,753,689,625,1000,987,740,676, 612,548,974,727,663,599,961,714,650,586,948,765,701,637,1012,999,752,688,624, 560,986,739,675,611,973,726,662,598,960,713,649] [views:debug,2014-08-19T16:54:42.620,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/587. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.620,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",587,active,0} [ns_server:debug,2014-08-19T16:54:42.735,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 682. Nacking mccouch update. [views:debug,2014-08-19T16:54:42.735,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/682. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.736,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",682,active,0} [ns_server:debug,2014-08-19T16:54:42.736,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,990,974,958,942,1022,1006,762, 746,730,714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,684, 998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744, 728,712,696,1023,994,978,962,946,1010,766,734,702,968,1000] [ns_server:debug,2014-08-19T16:54:42.794,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 570. Nacking mccouch update. [views:debug,2014-08-19T16:54:42.794,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/570. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.794,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",570,active,0} [ns_server:debug,2014-08-19T16:54:42.794,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938,1002,742, 710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754, 722,690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664, 648,632,616,600,584,1023,994,978,962,946,1010,766,734,702,670,638,606,574, 968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586,980, 948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992, 960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578] [views:debug,2014-08-19T16:54:42.844,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/682. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.844,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",682,active,0} [views:debug,2014-08-19T16:54:42.889,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/570. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.889,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",570,active,0} [ns_server:debug,2014-08-19T16:54:42.906,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 585. Nacking mccouch update. [views:debug,2014-08-19T16:54:42.906,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/585. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.906,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",585,active,0} [ns_server:debug,2014-08-19T16:54:42.907,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,1022,945,762, 698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710,646, 582,1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969,722, 658,594,956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981,734, 670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,1006,993,746, 682,618,554,980,733,669,605,967,720,656,592,954,707,643,1018,941,758,694,630, 566,1005,992,745,681,617,979,947,764,732,700,668,636,604,572,1011,998,966, 751,719,687,655,623,591,985,953,738,706,674,642,610,578,1017,972,940,757,725, 693,661,629,597,1004,991,959,744,712,680,648,616,584,552,1023,978,946,763, 731,699,667,635,603,1010,965,718,654,590,952,705,641,1016,939,756,692,628, 564,1003,990,743,679,615,977,730,666,602,964,717,653,589,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,601,963,716,652, 588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728,664, 600,962,715,651,587,949,766,702,638,574,1013,753,689,625,1000,987,740,676, 612,548,974,727,663,599,961,714,650,586,948,765,701,637,1012,999,752,688,624, 560,986,739,675,611,973,726,662,598,960,713,649,585] [views:debug,2014-08-19T16:54:42.974,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/585. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:42.974,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",585,active,0} [ns_server:debug,2014-08-19T16:54:43.016,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 680. Nacking mccouch update. [views:debug,2014-08-19T16:54:43.016,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/680. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.016,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",680,active,0} [ns_server:debug,2014-08-19T16:54:43.017,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,990,974,958,942,1022,1006,762, 746,730,714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,992,976,960,944,1008,764,748,732,716,700,684, 998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760,744, 728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000] [ns_server:debug,2014-08-19T16:54:43.134,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 568. Nacking mccouch update. [views:debug,2014-08-19T16:54:43.134,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/568. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.134,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",568,active,0} [views:debug,2014-08-19T16:54:43.134,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/680. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.134,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",680,active,0} [ns_server:debug,2014-08-19T16:54:43.134,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938,1002,742, 710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754, 722,690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578] [views:debug,2014-08-19T16:54:43.268,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/568. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.268,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",568,active,0} [ns_server:debug,2014-08-19T16:54:43.309,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 583. Nacking mccouch update. [views:debug,2014-08-19T16:54:43.309,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/583. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.310,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",583,active,0} [ns_server:debug,2014-08-19T16:54:43.310,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710, 646,582,1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969, 722,658,594,956,709,645,1020,943,760,696,632,568,1007,994,747,683,619,981, 734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,1006,993, 746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,1018,941,758,694, 630,566,1005,992,745,681,617,979,947,764,732,700,668,636,604,572,1011,998, 966,751,719,687,655,623,591,985,953,738,706,674,642,610,578,1017,972,940,757, 725,693,661,629,597,1004,991,959,744,712,680,648,616,584,552,1023,978,946, 763,731,699,667,635,603,1010,965,718,654,590,952,705,641,1016,939,756,692, 628,564,1003,990,743,679,615,977,730,666,602,964,717,653,589,951,704,640,576, 1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,601,963,716,652, 588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728,664, 600,962,715,651,587,949,766,702,638,574,1013,753,689,625,1000,987,740,676, 612,548,974,727,663,599,961,714,650,586,948,765,701,637,1012,999,752,688,624, 560,986,739,675,611,973,726,662,598,960,713,649,585] [ns_server:debug,2014-08-19T16:54:43.427,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 678. Nacking mccouch update. [views:debug,2014-08-19T16:54:43.427,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/678. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.427,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",678,active,0} [ns_server:debug,2014-08-19T16:54:43.427,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,990,974,958,942,1022,1006,762, 746,730,714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004,760, 744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000] [views:debug,2014-08-19T16:54:43.427,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/583. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.427,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",583,active,0} [views:debug,2014-08-19T16:54:43.503,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/678. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.503,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",678,active,0} [ns_server:debug,2014-08-19T16:54:43.536,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 566. Nacking mccouch update. [views:debug,2014-08-19T16:54:43.537,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/566. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.537,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",566,active,0} [ns_server:debug,2014-08-19T16:54:43.537,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938,1002,742, 710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754, 722,690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578] [views:debug,2014-08-19T16:54:43.637,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/566. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.637,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",566,active,0} [ns_server:debug,2014-08-19T16:54:43.670,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 581. Nacking mccouch update. [views:debug,2014-08-19T16:54:43.670,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/581. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.671,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",581,active,0} [ns_server:debug,2014-08-19T16:54:43.671,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710, 646,582,1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969, 722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747,683,619, 981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,1006, 993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,1018,941,758, 694,630,566,1005,992,745,681,617,979,947,764,732,700,668,636,604,572,1011, 998,966,751,719,687,655,623,591,985,953,738,706,674,642,610,578,1017,972,940, 757,725,693,661,629,597,1004,991,959,744,712,680,648,616,584,552,1023,978, 946,763,731,699,667,635,603,1010,965,718,654,590,952,705,641,1016,939,756, 692,628,564,1003,990,743,679,615,977,730,666,602,964,717,653,589,951,704,640, 576,1015,938,755,691,627,1002,989,742,678,614,550,976,729,665,601,963,716, 652,588,950,767,703,639,1014,754,690,626,562,1001,988,741,677,613,975,728, 664,600,962,715,651,587,949,766,702,638,574,1013,753,689,625,1000,987,740, 676,612,548,974,727,663,599,961,714,650,586,948,765,701,637,1012,999,752,688, 624,560,986,739,675,611,973,726,662,598,960,713,649,585] [ns_server:debug,2014-08-19T16:54:43.729,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 676. Nacking mccouch update. [views:debug,2014-08-19T16:54:43.729,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/676. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.729,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",676,active,0} [ns_server:debug,2014-08-19T16:54:43.730,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,740,724,708,692,676,990,974,958,942,1022,1006, 762,746,730,714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954, 938,1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716, 700,684,998,982,966,950,1014,754,738,722,706,690,988,972,956,940,1020,1004, 760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000] [views:debug,2014-08-19T16:54:43.780,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/581. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.780,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",581,active,0} [views:debug,2014-08-19T16:54:43.813,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/676. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.813,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",676,active,0} [ns_server:debug,2014-08-19T16:54:43.897,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 564. Nacking mccouch update. [views:debug,2014-08-19T16:54:43.897,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/564. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.897,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",564,active,0} [ns_server:debug,2014-08-19T16:54:43.898,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950, 1014,754,722,690,658,626,594,988,956,1020,760,744,728,712,696,680,664,648, 632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606,574, 968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586,980, 948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,566, 992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578,972,940, 1004] [views:debug,2014-08-19T16:54:43.948,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/564. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:43.948,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",564,active,0} [ns_server:debug,2014-08-19T16:54:44.060,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 674. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:44.060,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 579. Nacking mccouch update. [views:debug,2014-08-19T16:54:44.060,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/674. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.060,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",674,active,0} [views:debug,2014-08-19T16:54:44.060,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/579. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.061,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",579,active,0} [ns_server:debug,2014-08-19T16:54:44.061,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,996,980,964,948,1012,752,736,720,704,688,986,970,954,938,1018, 1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700,684, 998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004,760, 744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000,740,708, 676] [ns_server:debug,2014-08-19T16:54:44.062,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710, 646,582,1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969, 722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747,683,619, 981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,1006, 993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,579,1018,941, 758,694,630,566,1005,992,745,681,617,979,732,668,604,998,966,751,719,687,655, 623,591,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,661,629,597, 1004,991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635, 603,1010,965,718,654,590,952,705,641,1016,939,756,692,628,564,1003,990,743, 679,615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627, 1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703,639, 1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962,715,651,587, 949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548,974,727,663, 599,961,714,650,586,948,765,701,637,1012,999,752,688,624,560,986,739,675,611, 973,726,662,598,960,713,649,585,947,764,700,636,572,1011] [views:debug,2014-08-19T16:54:44.128,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/579. Updated state: active (0) [views:debug,2014-08-19T16:54:44.128,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/674. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.128,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",579,active,0} [ns_server:debug,2014-08-19T16:54:44.128,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",674,active,0} [ns_server:debug,2014-08-19T16:54:44.228,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 562. Nacking mccouch update. [views:debug,2014-08-19T16:54:44.228,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/562. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.228,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",562,active,0} [ns_server:debug,2014-08-19T16:54:44.229,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950, 1014,754,722,690,658,626,594,562,988,956,1020,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578,972, 940,1004] [views:debug,2014-08-19T16:54:44.296,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/562. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.296,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",562,active,0} [ns_server:debug,2014-08-19T16:54:44.396,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 672. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:44.396,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 577. Nacking mccouch update. [views:debug,2014-08-19T16:54:44.396,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/672. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.396,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",672,active,0} [views:debug,2014-08-19T16:54:44.396,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/577. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.396,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",577,active,0} [ns_server:debug,2014-08-19T16:54:44.396,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004, 760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,968,1000,740, 708,676] [ns_server:debug,2014-08-19T16:54:44.397,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710, 646,582,1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969, 722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747,683,619, 981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,1006, 993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,579,1018,941, 758,694,630,566,1005,992,745,681,617,979,732,668,604,998,966,751,719,687,655, 623,591,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,661,629,597, 1004,991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635, 603,1010,965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003,990, 743,679,615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691, 627,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703,639, 1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962,715,651,587, 949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548,974,727,663, 599,961,714,650,586,948,765,701,637,1012,999,752,688,624,560,986,739,675,611, 973,726,662,598,960,713,649,585,947,764,700,636,572,1011] [ns_server:debug,2014-08-19T16:54:44.463,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 560. Nacking mccouch update. [views:debug,2014-08-19T16:54:44.463,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/560. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.463,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",560,active,0} [views:debug,2014-08-19T16:54:44.463,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/672. Updated state: active (0) [views:debug,2014-08-19T16:54:44.463,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/577. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.463,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",672,active,0} [ns_server:debug,2014-08-19T16:54:44.463,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950, 1014,754,722,690,658,626,594,562,988,956,1020,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630, 598,566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578, 972,940,1004] [ns_server:debug,2014-08-19T16:54:44.464,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",577,active,0} [views:debug,2014-08-19T16:54:44.564,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/560. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.564,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",560,active,0} [ns_server:debug,2014-08-19T16:54:44.765,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 670. Nacking mccouch update. [views:debug,2014-08-19T16:54:44.765,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/670. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.765,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",670,active,0} [ns_server:debug,2014-08-19T16:54:44.765,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004, 760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,670,968,1000, 740,708,676] [ns_server:debug,2014-08-19T16:54:44.840,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 558. Nacking mccouch update. [views:debug,2014-08-19T16:54:44.840,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/558. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.841,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",558,active,0} [ns_server:debug,2014-08-19T16:54:44.841,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950, 1014,754,722,690,658,626,594,562,988,956,1020,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630, 598,566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578, 972,940,1004] [ns_server:debug,2014-08-19T16:54:44.882,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 575. Nacking mccouch update. [views:debug,2014-08-19T16:54:44.882,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/575. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.882,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",575,active,0} [ns_server:debug,2014-08-19T16:54:44.883,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710, 646,582,1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969, 722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747,683,619, 981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,1006, 993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,579,1018,941, 758,694,630,566,1005,992,745,681,617,979,732,668,604,998,966,751,719,687,655, 623,591,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,661,629,597, 1004,991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635, 603,1010,965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003,990, 743,679,615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691, 627,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703,639, 575,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962,715,651, 587,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548,974,727, 663,599,961,714,650,586,948,765,701,637,1012,999,752,688,624,560,986,739,675, 611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011] [views:debug,2014-08-19T16:54:44.899,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/670. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.900,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",670,active,0} [views:debug,2014-08-19T16:54:44.958,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/558. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:44.958,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",558,active,0} [views:debug,2014-08-19T16:54:45.000,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/575. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:45.000,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",575,active,0} [ns_server:debug,2014-08-19T16:54:45.135,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 668. Nacking mccouch update. [views:debug,2014-08-19T16:54:45.135,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/668. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:45.135,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",668,active,0} [ns_server:debug,2014-08-19T16:54:45.135,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,668,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020, 1004,760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,670,968, 1000,740,708,676] [ns_server:debug,2014-08-19T16:54:45.219,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 556. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:45.219,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 573. Nacking mccouch update. [views:debug,2014-08-19T16:54:45.219,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/556. Updated state: active (0) [views:debug,2014-08-19T16:54:45.219,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/573. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:45.219,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",556,active,0} [views:debug,2014-08-19T16:54:45.219,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/668. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:45.219,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",573,active,0} [ns_server:debug,2014-08-19T16:54:45.220,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",668,active,0} [ns_server:debug,2014-08-19T16:54:45.220,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,556,982, 950,1014,754,722,690,658,626,594,562,988,956,1020,760,744,728,712,696,680, 664,648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638, 606,574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618, 586,980,948,1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662, 630,598,566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610, 578,972,940,1004] [ns_server:debug,2014-08-19T16:54:45.220,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710, 646,582,1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969, 722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747,683,619, 981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,1006, 993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,579,1018,941, 758,694,630,566,1005,992,745,681,617,979,732,668,604,998,966,751,719,687,655, 623,591,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,661,629,597, 1004,991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635, 603,1010,965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003,990, 743,679,615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691, 627,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703,639, 575,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962,715,651, 587,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548,974,727, 663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739, 675,611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011] [views:debug,2014-08-19T16:54:45.331,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/573. Updated state: active (0) [views:debug,2014-08-19T16:54:45.331,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/556. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:45.331,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",573,active,0} [ns_server:debug,2014-08-19T16:54:45.332,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",556,active,0} [ns_server:debug,2014-08-19T16:54:45.456,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 666. Nacking mccouch update. 
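The entries above all follow one pattern: mc_connection adds a _local/vbuuid document to a vbucket and nacks the mccouch update, capi_set_view_manager logs the corresponding set_vbucket event as active, and that vbucket then appears in the bucket's "Usable vbuckets" list. When auditing a long run of such entries it can help to collapse them into a per-bucket state map; the sketch below is illustrative only (plain Python, not Couchbase code), and the regex and the track_vbucket_states name are assumptions made for this example.

# Illustrative sketch (assumption: not part of Couchbase) that reconstructs
# per-bucket vbucket states from ns_server debug log lines in the format above.
import re
from collections import defaultdict

# Matches e.g. "Got set_vbucket event for maps_1_8_metahash/571. Updated state: active"
SET_VBUCKET = re.compile(
    r"Got set_vbucket event for (?P<bucket>[\w.]+)/(?P<vb>\d+)\.\s*"
    r"Updated state: (?P<state>\w+)"
)

def track_vbucket_states(log_text):
    """Return {bucket: {vbucket_id: last_reported_state}} for the given log text."""
    states = defaultdict(dict)
    for m in SET_VBUCKET.finditer(log_text):
        states[m.group("bucket")][int(m.group("vb"))] = m.group("state")
    return states

# Feeding this excerpt would, for example, report vbuckets such as 666, 664 and 662
# for "tiles", all with a last reported state of "active", matching the
# "Usable vbuckets" lists printed by capi_set_view_manager.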
[views:debug,2014-08-19T16:54:45.457,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/666. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:45.457,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",666,active,0} [ns_server:debug,2014-08-19T16:54:45.457,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,974,958,942,1022,1006,762,746,730, 714,698,682,666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938, 1018,1002,758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700, 684,668,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020, 1004,760,744,728,712,696,680,1023,994,978,962,946,1010,766,734,702,670,968, 1000,740,708,676] [ns_server:debug,2014-08-19T16:54:45.532,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 571. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:45.532,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 554. Nacking mccouch update. [views:debug,2014-08-19T16:54:45.532,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/571. Updated state: active (0) [views:debug,2014-08-19T16:54:45.532,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/554. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:45.532,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",571,active,0} [ns_server:debug,2014-08-19T16:54:45.532,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",554,active,0} [views:debug,2014-08-19T16:54:45.532,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/666. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:45.532,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,556,982, 950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632,600, 568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708,676, 644,612,580,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720, 688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732, 700,668,636,604,572,998,966,738,706,674,642,610,578,972,940,1004,744,712,680, 648,616,584,1023] [ns_server:debug,2014-08-19T16:54:45.533,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",666,active,0} [ns_server:debug,2014-08-19T16:54:45.533,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710, 646,582,1021,944,761,697,633,1008,995,748,684,620,556,982,735,671,607,969, 722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747,683,619, 981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,1006, 993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,579,1018,941, 758,694,630,566,1005,992,745,681,617,979,732,668,604,998,966,751,719,687,655, 623,591,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,661,629,597, 1004,991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635, 603,571,1010,965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003, 990,743,679,615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755, 691,627,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703, 639,575,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962,715, 651,587,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548,974, 727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986, 739,675,611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011] [views:debug,2014-08-19T16:54:45.616,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/554. Updated state: active (0) [views:debug,2014-08-19T16:54:45.616,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/571. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:45.616,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",554,active,0} [ns_server:debug,2014-08-19T16:54:45.616,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",571,active,0} [ns_server:debug,2014-08-19T16:54:45.776,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 664. Nacking mccouch update. [views:debug,2014-08-19T16:54:45.776,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/664. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:45.776,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",664,active,0} [ns_server:debug,2014-08-19T16:54:45.777,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,958,1022,762,746,730,714,698,682, 666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938,1018,1002, 758,742,726,710,694,678,992,976,960,944,1008,764,748,732,716,700,684,668,998, 982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670,968,1000,740, 708,676,974,942,1006] [ns_server:debug,2014-08-19T16:54:45.901,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 552. Nacking mccouch update. [views:debug,2014-08-19T16:54:45.901,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/552. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:45.902,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",552,active,0} [ns_server:debug,2014-08-19T16:54:45.902,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,556,982, 950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632,600, 568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708,676, 644,612,580,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720, 688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732, 700,668,636,604,572,998,966,738,706,674,642,610,578,972,940,1004,744,712,680, 648,616,584,552,1023] [ns_server:debug,2014-08-19T16:54:45.943,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 569. Nacking mccouch update. [views:debug,2014-08-19T16:54:45.943,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/569. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:45.943,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",569,active,0} [views:debug,2014-08-19T16:54:45.944,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/664. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:45.944,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",664,active,0} [ns_server:debug,2014-08-19T16:54:45.944,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710, 646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607, 969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747,683, 619,981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631, 1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,579, 1018,941,758,694,630,566,1005,992,745,681,617,979,732,668,604,966,719,655, 591,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,661,629,597, 1004,991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635, 603,571,1010,965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003, 990,743,679,615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755, 691,627,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703, 639,575,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962,715, 651,587,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548,974, 727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986, 739,675,611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751, 687,623] [views:debug,2014-08-19T16:54:46.020,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/552. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.020,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",552,active,0} [views:debug,2014-08-19T16:54:46.071,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/569. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.071,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",569,active,0} [ns_server:debug,2014-08-19T16:54:46.187,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 662. Nacking mccouch update. [views:debug,2014-08-19T16:54:46.187,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/662. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.188,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",662,active,0} [ns_server:debug,2014-08-19T16:54:46.188,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,990,958,1022,762,746,730,714,698,682, 666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670,968,1000, 740,708,676,974,942,1006] [ns_server:debug,2014-08-19T16:54:46.204,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 550. Nacking mccouch update. [views:debug,2014-08-19T16:54:46.204,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/550. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.204,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",550,active,0} [ns_server:debug,2014-08-19T16:54:46.205,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708, 676,644,612,580,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752, 720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764, 732,700,668,636,604,572,998,966,738,706,674,642,610,578,972,940,1004,744,712, 680,648,616,584,552,1023] [views:debug,2014-08-19T16:54:46.221,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/662. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.222,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",662,active,0} [ns_server:debug,2014-08-19T16:54:46.330,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 567. Nacking mccouch update. [views:debug,2014-08-19T16:54:46.330,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/567. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.330,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",567,active,0} [views:debug,2014-08-19T16:54:46.331,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/550. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.331,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",550,active,0} [ns_server:debug,2014-08-19T16:54:46.331,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710, 646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607, 969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747,683, 619,981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,567, 1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,579, 1018,941,758,694,630,566,1005,992,745,681,617,979,732,668,604,966,719,655, 591,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,661,629,597, 1004,991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635, 603,571,1010,965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003, 990,743,679,615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755, 691,627,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703, 639,575,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962,715, 651,587,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548,974, 727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986, 739,675,611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751, 687,623] [views:debug,2014-08-19T16:54:46.431,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/567. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.431,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",567,active,0} [ns_server:debug,2014-08-19T16:54:46.481,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 660. Nacking mccouch update. [views:debug,2014-08-19T16:54:46.481,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/660. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.481,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",660,active,0} [ns_server:debug,2014-08-19T16:54:46.481,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,660,990,958,1022,762,746,730,714,698, 682,666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938,1018, 1002,758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700, 684,668,998,982,966,950,1014,754,738,722,706,690,674,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670, 968,1000,740,708,676,974,942,1006] [ns_server:debug,2014-08-19T16:54:46.585,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 548. Nacking mccouch update. [views:debug,2014-08-19T16:54:46.586,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/548. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.586,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",548,active,0} [ns_server:debug,2014-08-19T16:54:46.586,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708, 676,644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012, 752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960, 764,732,700,668,636,604,572,998,966,738,706,674,642,610,578,972,940,1004,744, 712,680,648,616,584,552,1023] [views:debug,2014-08-19T16:54:46.602,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/660. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.603,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",660,active,0} [views:debug,2014-08-19T16:54:46.711,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/548. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.712,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",548,active,0} [ns_server:debug,2014-08-19T16:54:46.744,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 565. Nacking mccouch update. [views:debug,2014-08-19T16:54:46.744,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/565. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.745,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",565,active,0} [ns_server:debug,2014-08-19T16:54:46.745,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710, 646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607, 969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747,683, 619,981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,567, 1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,579, 1018,941,758,694,630,566,1005,992,745,681,617,979,732,668,604,966,719,655, 591,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,661,629,597,565, 1004,991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635, 603,571,1010,965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003, 990,743,679,615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755, 691,627,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703, 639,575,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962,715, 651,587,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548,974, 727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986, 739,675,611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751, 687,623] [views:debug,2014-08-19T16:54:46.862,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/565. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.862,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",565,active,0} [ns_server:debug,2014-08-19T16:54:46.895,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 658. Nacking mccouch update. [views:debug,2014-08-19T16:54:46.895,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/658. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.895,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",658,active,0} [ns_server:debug,2014-08-19T16:54:46.895,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,660,990,958,1022,762,746,730,714,698, 682,666,996,980,964,948,1012,752,736,720,704,688,672,986,970,954,938,1018, 1002,758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700, 684,668,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702, 670,968,1000,740,708,676,974,942,1006] [ns_server:debug,2014-08-19T16:54:46.996,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 546. Nacking mccouch update. [views:debug,2014-08-19T16:54:46.996,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/546. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.996,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",546,active,0} [views:debug,2014-08-19T16:54:46.996,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/658. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:46.997,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",658,active,0} [ns_server:debug,2014-08-19T16:54:46.997,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708, 676,644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012, 752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960, 764,732,700,668,636,604,572,998,966,738,706,674,642,610,578,546,972,940,1004, 744,712,680,648,616,584,552,1023] [views:debug,2014-08-19T16:54:47.097,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/546. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:47.097,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",546,active,0} [ns_server:debug,2014-08-19T16:54:47.147,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 563. Nacking mccouch update. [views:debug,2014-08-19T16:54:47.147,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/563. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:47.147,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",563,active,0} [ns_server:debug,2014-08-19T16:54:47.148,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710, 646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607, 969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747,683, 619,981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,567, 1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,579, 1018,941,758,694,630,566,1005,992,745,681,617,979,732,668,604,966,719,655, 591,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,661,629,597,565, 1004,991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635, 603,571,1010,965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003, 990,743,679,615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755, 691,627,563,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767, 703,639,575,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962, 715,651,587,949,766,702,638,574,1013,753,689,625,1000,987,740,676,612,548, 974,727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560, 986,739,675,611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998, 751,687,623] [views:debug,2014-08-19T16:54:47.290,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/563. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:47.290,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",563,active,0} [ns_server:debug,2014-08-19T16:54:47.332,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 656. Nacking mccouch update. [views:debug,2014-08-19T16:54:47.332,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/656. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:47.332,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",656,active,0} [ns_server:debug,2014-08-19T16:54:47.332,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,984,952,1016,756,724,692,660,990,958,1022,762,746,730,714,698, 682,666,996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938, 1018,1002,758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716, 700,684,668,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702, 670,968,1000,740,708,676,974,942,1006] [ns_server:debug,2014-08-19T16:54:47.399,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 544. Nacking mccouch update. [views:debug,2014-08-19T16:54:47.399,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/544. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:47.399,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",544,active,0} [ns_server:debug,2014-08-19T16:54:47.399,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,962,766,734,702,670,638,606,574,968,1000,740,708,676,644,612,580, 548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720,688,656, 624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732,700,668, 636,604,572,998,966,738,706,674,642,610,578,546,972,940,1004,744,712,680,648, 616,584,552,1023,978,946,1010] [views:debug,2014-08-19T16:54:47.433,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/656. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:47.433,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",656,active,0} [views:debug,2014-08-19T16:54:47.466,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/544. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:47.466,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",544,active,0} [ns_server:debug,2014-08-19T16:54:47.567,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 561. Nacking mccouch update. [views:debug,2014-08-19T16:54:47.567,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/561. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:47.567,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",561,active,0} [ns_server:debug,2014-08-19T16:54:47.568,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710, 646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607, 969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747,683, 619,981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,567, 1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,579, 1018,941,758,694,630,566,1005,992,745,681,617,979,732,668,604,966,719,655, 591,985,953,738,706,674,642,610,578,1017,972,940,757,725,693,661,629,597,565, 1004,991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635, 603,571,1010,965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003, 990,743,679,615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755, 691,627,563,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767, 703,639,575,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962, 715,651,587,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612, 548,974,727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624, 560,986,739,675,611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011, 998,751,687,623] [views:debug,2014-08-19T16:54:47.609,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/561. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:47.609,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",561,active,0} [ns_server:debug,2014-08-19T16:54:47.676,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 654. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:47.676,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 542. Nacking mccouch update. [views:debug,2014-08-19T16:54:47.676,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/654. Updated state: active (0) [views:debug,2014-08-19T16:54:47.676,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/542. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:47.676,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",654,active,0} [ns_server:debug,2014-08-19T16:54:47.676,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",542,active,0} [ns_server:debug,2014-08-19T16:54:47.676,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670,968, 1000,740,708,676,974,942,1006,746,714,682] [ns_server:debug,2014-08-19T16:54:47.677,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720,688, 656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732,700, 668,636,604,572,998,966,738,706,674,642,610,578,546,972,940,1004,744,712,680, 648,616,584,552,1023,978,946,1010] [views:debug,2014-08-19T16:54:47.745,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/542. Updated state: active (0) [views:debug,2014-08-19T16:54:47.745,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/654. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:47.745,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",542,active,0} [ns_server:debug,2014-08-19T16:54:47.745,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",654,active,0} [ns_server:debug,2014-08-19T16:54:47.797,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 559. Nacking mccouch update. [views:debug,2014-08-19T16:54:47.797,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/559. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:47.797,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",559,active,0} [ns_server:debug,2014-08-19T16:54:47.798,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,983,736,672,608,970,723,659,595,957,710, 646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607, 969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747,683, 619,981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631,567, 1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,579, 1018,941,758,694,630,566,1005,992,745,681,617,979,732,668,604,966,719,655, 591,953,706,642,578,1017,972,940,757,725,693,661,629,597,565,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635,603,571,1010, 965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003,990,743,679, 615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703,639,575, 1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962,715,651,587, 949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727, 663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739, 675,611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751,687, 623,559,985,738,674,610] [views:debug,2014-08-19T16:54:47.898,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/559. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:47.898,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",559,active,0} [ns_server:debug,2014-08-19T16:54:48.015,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 652. Nacking mccouch update. [views:debug,2014-08-19T16:54:48.015,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/652. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.015,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 540. Nacking mccouch update. [views:debug,2014-08-19T16:54:48.015,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/540. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.015,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",652,active,0} [ns_server:debug,2014-08-19T16:54:48.015,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",540,active,0} [ns_server:debug,2014-08-19T16:54:48.016,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 652,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670, 968,1000,740,708,676,974,942,1006,746,714,682] [ns_server:debug,2014-08-19T16:54:48.016,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720,688, 656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732,700, 668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940,1004,744,712, 680,648,616,584,552,1023,978,946,1010] [views:debug,2014-08-19T16:54:48.099,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/540. Updated state: active (0) [views:debug,2014-08-19T16:54:48.099,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/652. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.099,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",540,active,0} [ns_server:debug,2014-08-19T16:54:48.099,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",652,active,0} [ns_server:debug,2014-08-19T16:54:48.191,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 557. Nacking mccouch update. [views:debug,2014-08-19T16:54:48.191,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/557. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.191,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",557,active,0} [ns_server:debug,2014-08-19T16:54:48.192,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595,957, 710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671, 607,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747, 683,619,981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695,631, 567,1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643,579, 1018,941,758,694,630,566,1005,992,745,681,617,979,732,668,604,966,719,655, 591,953,706,642,578,1017,972,940,757,725,693,661,629,597,565,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635,603,571,1010, 965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003,990,743,679, 615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703,639,575, 1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962,715,651,587, 949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727, 663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739, 675,611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751,687, 623,559,985,738,674,610] [views:debug,2014-08-19T16:54:48.276,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/557. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.276,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",557,active,0} [ns_server:debug,2014-08-19T16:54:48.442,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 538. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:48.442,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 650. Nacking mccouch update. [views:debug,2014-08-19T16:54:48.442,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/538. Updated state: active (0) [views:debug,2014-08-19T16:54:48.442,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/650. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.442,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",538,active,0} [ns_server:debug,2014-08-19T16:54:48.442,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",650,active,0} [ns_server:debug,2014-08-19T16:54:48.443,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970, 938,1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588, 556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664, 632,600,568,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644, 612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720, 688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732, 700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940,1004,744, 712,680,648,616,584,552,1023,978,946,1010] [ns_server:debug,2014-08-19T16:54:48.443,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 652,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,1023,994,978,962,946,1010,766,734,702,670, 968,1000,740,708,676,974,942,1006,746,714,682,650] [views:debug,2014-08-19T16:54:48.528,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/650. Updated state: active (0) [views:debug,2014-08-19T16:54:48.528,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/538. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.528,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",650,active,0} [ns_server:debug,2014-08-19T16:54:48.529,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",538,active,0} [ns_server:debug,2014-08-19T16:54:48.604,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 555. Nacking mccouch update. [views:debug,2014-08-19T16:54:48.604,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/555. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.604,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",555,active,0} [ns_server:debug,2014-08-19T16:54:48.604,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595,957, 710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671, 607,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747, 683,619,555,981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695, 631,567,1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643, 579,1018,941,758,694,630,566,1005,992,745,681,617,979,732,668,604,966,719, 655,591,953,706,642,578,1017,972,940,757,725,693,661,629,597,565,1004,991, 959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635,603,571, 1010,965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003,990,743, 679,615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627, 563,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703,639, 575,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962,715,651, 587,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974, 727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986, 739,675,611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751, 687,623,559,985,738,674,610] [views:debug,2014-08-19T16:54:48.654,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/555. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.654,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",555,active,0} [ns_server:debug,2014-08-19T16:54:48.722,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 536. Nacking mccouch update. [views:debug,2014-08-19T16:54:48.722,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/536. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.722,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",536,active,0} [ns_server:debug,2014-08-19T16:54:48.723,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970, 938,1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588, 556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664, 632,600,568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676, 644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752, 720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764, 732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940,1004, 744,712,680,648,616,584,552,1023,978,946,1010] [ns_server:debug,2014-08-19T16:54:48.738,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 648. Nacking mccouch update. [views:debug,2014-08-19T16:54:48.738,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/648. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.738,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",648,active,0} [ns_server:debug,2014-08-19T16:54:48.739,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,992,976,960,944,1008,764,748,732,716,700,684,668, 652,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702, 670,968,1000,740,708,676,974,942,1006,746,714,682,650] [views:debug,2014-08-19T16:54:48.789,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/536. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.789,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",536,active,0} [views:debug,2014-08-19T16:54:48.805,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/648. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.806,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",648,active,0} [ns_server:debug,2014-08-19T16:54:48.839,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 553. Nacking mccouch update. [views:debug,2014-08-19T16:54:48.839,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/553. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.839,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",553,active,0} [ns_server:debug,2014-08-19T16:54:48.840,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595,957, 710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671, 607,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747, 683,619,555,981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695, 631,567,1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643, 579,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732,668,604,966, 719,655,591,953,706,642,578,1017,972,940,757,725,693,661,629,597,565,1004, 991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635,603,571, 1010,965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003,990,743, 679,615,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627, 563,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703,639, 575,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962,715,651, 587,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974, 727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986, 739,675,611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751, 687,623,559,985,738,674,610] [views:debug,2014-08-19T16:54:48.907,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/553. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:48.907,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",553,active,0} [ns_server:debug,2014-08-19T16:54:49.069,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 534. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:49.069,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 646. Nacking mccouch update. [views:debug,2014-08-19T16:54:49.069,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/534. Updated state: active (0) [views:debug,2014-08-19T16:54:49.069,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/646. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.069,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",534,active,0} [ns_server:debug,2014-08-19T16:54:49.069,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",646,active,0} [ns_server:debug,2014-08-19T16:54:49.070,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,980,964,948,1012,752,736,720,704,688,672,656,986,970,954,938,1018,1002, 758,742,726,710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684, 668,652,998,982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734, 702,670,968,1000,740,708,676,974,942,1006,746,714,682,650] [ns_server:debug,2014-08-19T16:54:49.069,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,990,958,1022,762, 730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938,1002, 742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556,982,950, 1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632,600,568, 536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612,580, 548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720,688,656, 624,592,560,986,954,1018,758,726,694,662,630,598,566,534,992,960,764,732,700, 668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940,1004,744,712, 680,648,616,584,552,1023,978,946,1010,718,654,590] [views:debug,2014-08-19T16:54:49.136,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/534. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.137,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",534,active,0} [views:debug,2014-08-19T16:54:49.145,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/646. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.145,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",646,active,0} [ns_server:debug,2014-08-19T16:54:49.253,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 551. Nacking mccouch update. [views:debug,2014-08-19T16:54:49.253,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/551. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.254,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",551,active,0} [ns_server:debug,2014-08-19T16:54:49.255,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595,957, 710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671, 607,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747, 683,619,555,981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695, 631,567,1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643, 579,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732,668,604,966, 719,655,591,953,706,642,578,1017,972,940,757,725,693,661,629,597,565,1004, 991,959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635,603,571, 1010,965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003,990,743, 679,615,551,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691, 627,563,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703, 639,575,1014,754,690,626,562,1001,988,741,677,613,975,728,664,600,962,715, 651,587,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548, 974,727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560, 986,739,675,611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998, 751,687,623,559,985,738,674,610] [views:debug,2014-08-19T16:54:49.338,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/551. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.338,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",551,active,0} [ns_server:debug,2014-08-19T16:54:49.496,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 644. Nacking mccouch update. [views:debug,2014-08-19T16:54:49.496,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/644. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.496,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 532. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:49.496,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",644,active,0} [views:debug,2014-08-19T16:54:49.497,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/532. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.497,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",532,active,0} [ns_server:debug,2014-08-19T16:54:49.497,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,986,970,954,938,1018,1002,758,742,726, 710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652,998, 982,966,950,1014,754,738,722,706,690,674,658,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702,670,968, 1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012] [ns_server:debug,2014-08-19T16:54:49.497,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,532,990,958,1022, 762,730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644, 612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720, 688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,534,992,960,764, 732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940,1004, 744,712,680,648,616,584,552,1023,978,946,1010,718,654,590] [views:debug,2014-08-19T16:54:49.613,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/644. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.613,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",644,active,0} [views:debug,2014-08-19T16:54:49.655,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/532. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.655,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",532,active,0} [ns_server:debug,2014-08-19T16:54:49.688,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 549. Nacking mccouch update. [views:debug,2014-08-19T16:54:49.688,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/549. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.689,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",549,active,0} [ns_server:debug,2014-08-19T16:54:49.689,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595,957, 710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671, 607,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747, 683,619,555,981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695, 631,567,1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643, 579,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732,668,604,966, 719,655,591,953,706,642,578,1017,940,757,693,629,565,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,763,731,699,667,635,603,571,1010,965,718, 654,590,952,705,641,577,1016,939,756,692,628,564,1003,990,743,679,615,551, 977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703,639,575, 1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651, 587,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974, 727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986, 739,675,611,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751, 687,623,559,985,738,674,610,972,725,661,597] [views:debug,2014-08-19T16:54:49.766,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/549. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.766,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",549,active,0} [ns_server:debug,2014-08-19T16:54:49.832,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 642. Nacking mccouch update. [views:debug,2014-08-19T16:54:49.832,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/642. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.832,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 530. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:49.833,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",642,active,0} [views:debug,2014-08-19T16:54:49.833,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/530. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.833,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,986,970,954,938,1018,1002,758,742,726, 710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652,998, 982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702,670, 968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012] [ns_server:debug,2014-08-19T16:54:49.833,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",530,active,0} [ns_server:debug,2014-08-19T16:54:49.833,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,532,990,958,1022, 762,730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664, 632,600,568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676, 644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752, 720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940, 1004,744,712,680,648,616,584,552,1023,978,946,1010,718,654,590] [views:debug,2014-08-19T16:54:49.875,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/642. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.875,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",642,active,0} [views:debug,2014-08-19T16:54:49.883,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/530. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.883,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",530,active,0} [ns_server:debug,2014-08-19T16:54:49.925,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 547. Nacking mccouch update. [views:debug,2014-08-19T16:54:49.925,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/547. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:49.925,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",547,active,0} [ns_server:debug,2014-08-19T16:54:49.926,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,971,724,660,596,958,711,647,583,1022,945, 762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595,957, 710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671, 607,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994,747, 683,619,555,981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759,695, 631,567,1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954,707,643, 579,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732,668,604,966, 719,655,591,953,706,642,578,1017,940,757,693,629,565,1004,991,959,744,712, 680,648,616,584,552,1023,978,946,763,731,699,667,635,603,571,1010,965,718, 654,590,952,705,641,577,1016,939,756,692,628,564,1003,990,743,679,615,551, 977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703,639,575, 1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651, 587,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974, 727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986, 739,675,611,547,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998, 751,687,623,559,985,738,674,610,972,725,661,597] [views:debug,2014-08-19T16:54:50.034,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/547. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:50.035,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",547,active,0} [ns_server:debug,2014-08-19T16:54:50.102,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 640. Nacking mccouch update. [views:debug,2014-08-19T16:54:50.102,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/640. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:50.103,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",640,active,0} [ns_server:debug,2014-08-19T16:54:50.103,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,640,986,970,954,938,1018,1002,758,742, 726,710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652, 998,982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702, 670,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012] [ns_server:debug,2014-08-19T16:54:50.118,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 528. Nacking mccouch update. [views:debug,2014-08-19T16:54:50.118,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/528. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:50.118,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",528,active,0} [ns_server:debug,2014-08-19T16:54:50.119,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,532,990,958,1022, 762,730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664, 632,600,568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676, 644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752, 720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992, 960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972, 940,1004,744,712,680,648,616,584,552,1023,978,946,1010,718,654,590] [views:debug,2014-08-19T16:54:50.169,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/640. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:50.169,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",640,active,0} [ns_server:debug,2014-08-19T16:54:50.205,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 545. Nacking mccouch update. [views:debug,2014-08-19T16:54:50.205,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/545. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:50.205,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",545,active,0} [views:debug,2014-08-19T16:54:50.206,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/528. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:50.206,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",528,active,0} [ns_server:debug,2014-08-19T16:54:50.206,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,545,971,724,660,596,958,711,647,583,1022, 945,762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595, 957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735, 671,607,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994, 747,683,619,555,981,734,670,606,968,721,657,593,955,708,644,580,1019,942,759, 695,631,567,1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954,707, 643,579,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732,668,604, 966,719,655,591,953,706,642,578,1017,940,757,693,629,565,1004,991,959,744, 712,680,648,616,584,552,1023,978,946,763,731,699,667,635,603,571,1010,965, 718,654,590,952,705,641,577,1016,939,756,692,628,564,1003,990,743,679,615, 551,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703,639,575, 1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651, 587,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974, 727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986, 739,675,611,547,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998, 751,687,623,559,985,738,674,610,972,725,661,597] [views:debug,2014-08-19T16:54:50.341,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/545. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:50.342,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",545,active,0} [ns_server:debug,2014-08-19T16:54:50.490,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 638. Nacking mccouch update. [views:debug,2014-08-19T16:54:50.490,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/638. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:50.490,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",638,active,0} [ns_server:debug,2014-08-19T16:54:50.491,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,640,986,970,954,938,1018,1002,758,742, 726,710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652, 998,982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012] [ns_server:debug,2014-08-19T16:54:50.549,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 526. Nacking mccouch update. 
[views:debug,2014-08-19T16:54:50.549,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/526. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:50.549,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",526,active,0} [ns_server:debug,2014-08-19T16:54:50.549,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,532,990,958,1022, 762,730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664, 632,600,568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676, 644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752, 720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992, 960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972, 940,1004,744,712,680,648,616,584,552,1023,978,946,1010,718,654,590,526] [ns_server:debug,2014-08-19T16:54:50.715,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 543. Nacking mccouch update. [views:debug,2014-08-19T16:54:50.716,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/543. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:50.716,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",543,active,0} [ns_server:debug,2014-08-19T16:54:50.716,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,545,971,724,660,596,958,711,647,583,1022, 945,762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595, 957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735, 671,607,543,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007, 994,747,683,619,555,981,734,670,606,968,721,657,593,955,708,644,580,1019,942, 759,695,631,567,1006,993,746,682,618,554,980,733,669,605,967,720,656,592,954, 707,643,579,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732,668, 604,966,719,655,591,953,706,642,578,1017,940,757,693,629,565,1004,991,959, 744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635,603,571,1010, 965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003,990,743,679, 615,551,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627, 563,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703,639, 575,1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,962,715, 651,587,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548, 974,727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560, 986,739,675,611,547,973,726,662,598,960,713,649,585,947,764,700,636,572,1011, 998,751,687,623,559,985,738,674,610,972,725,661,597] [views:debug,2014-08-19T16:54:50.766,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/638. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:50.766,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",638,active,0} [views:debug,2014-08-19T16:54:50.808,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/526. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:50.808,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",526,active,0} [views:debug,2014-08-19T16:54:50.900,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/543. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:50.900,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",543,active,0} [ns_server:debug,2014-08-19T16:54:51.077,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 636. Nacking mccouch update. [views:debug,2014-08-19T16:54:51.077,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/636. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.077,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",636,active,0} [ns_server:debug,2014-08-19T16:54:51.077,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 996,964,752,736,720,704,688,672,656,640,986,970,954,938,1018,1002,758,742, 726,710,694,678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652, 636,998,982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734, 702,670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948, 1012] [ns_server:debug,2014-08-19T16:54:51.094,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 524. Nacking mccouch update. [views:debug,2014-08-19T16:54:51.094,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/524. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.095,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",524,active,0} [ns_server:debug,2014-08-19T16:54:51.095,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,756,724,692,660,628,596,564,532,990,958,1022,762,730,698, 666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938,1002,742,710, 678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556,524,982,950, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720,688, 656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960,764, 732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940,1004, 744,712,680,648,616,584,552,1023,978,946,1010,718,654,590,526,952,1016] [ns_server:debug,2014-08-19T16:54:51.128,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 541. Nacking mccouch update. [views:debug,2014-08-19T16:54:51.128,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/541. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.129,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",541,active,0} [ns_server:debug,2014-08-19T16:54:51.129,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,545,971,724,660,596,958,711,647,583,1022, 945,762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595, 957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735, 671,607,543,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007, 994,747,683,619,555,981,734,670,606,968,721,657,593,955,708,644,580,1019,942, 759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592, 954,707,643,579,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732, 668,604,966,719,655,591,953,706,642,578,1017,940,757,693,629,565,1004,991, 959,744,712,680,648,616,584,552,1023,978,946,763,731,699,667,635,603,571, 1010,965,718,654,590,952,705,641,577,1016,939,756,692,628,564,1003,990,743, 679,615,551,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691, 627,563,1002,989,742,678,614,550,976,729,665,601,963,716,652,588,950,767,703, 639,575,1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,962, 715,651,587,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612, 548,974,727,663,599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624, 560,986,739,675,611,547,973,726,662,598,960,713,649,585,947,764,700,636,572, 1011,998,751,687,623,559,985,738,674,610,972,725,661,597] [views:debug,2014-08-19T16:54:51.145,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/636. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.146,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",636,active,0} [views:debug,2014-08-19T16:54:51.162,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/524. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.162,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",524,active,0} [views:debug,2014-08-19T16:54:51.196,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/541. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.196,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",541,active,0} [ns_server:debug,2014-08-19T16:54:51.347,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 634. Nacking mccouch update. [views:debug,2014-08-19T16:54:51.347,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/634. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.347,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",634,active,0} [ns_server:debug,2014-08-19T16:54:51.347,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710,694, 678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998,982, 966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,1023,994,978,962,946,1010,766,734,702,670,638, 968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012,752,720, 688,656] [ns_server:debug,2014-08-19T16:54:51.372,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 539. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:51.372,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 522. Nacking mccouch update. [views:debug,2014-08-19T16:54:51.372,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/539. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.372,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",539,active,0} [views:debug,2014-08-19T16:54:51.372,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/522. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.372,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",522,active,0} [ns_server:debug,2014-08-19T16:54:51.373,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,756,724,692,660,628,596,564,532,990,958,1022,762,730,698, 666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938,1002,742,710, 678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556,524,982,950, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752,720, 688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940, 1004,744,712,680,648,616,584,552,1023,978,946,1010,718,654,590,526,952,1016] [ns_server:debug,2014-08-19T16:54:51.373,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,545,971,724,660,596,958,711,647,583,1022, 945,762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595, 957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735, 671,607,543,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007, 994,747,683,619,555,981,734,670,606,968,721,657,593,955,708,644,580,1019,942, 759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592, 954,707,643,579,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732, 668,604,966,719,655,591,953,706,642,578,1017,940,757,693,629,565,1004,991, 744,680,616,552,978,946,763,731,699,667,635,603,571,539,1010,965,718,654,590, 952,705,641,577,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730, 666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,963,716,652,588,950,767,703,639,575,1014,754, 690,626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,949,766, 702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663,599, 961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739,675,611, 547,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751,687,623, 559,985,738,674,610,972,725,661,597,959,712,648,584,1023] [views:debug,2014-08-19T16:54:51.423,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/634. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.423,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",634,active,0} [views:debug,2014-08-19T16:54:51.440,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/522. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.440,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",522,active,0} [views:debug,2014-08-19T16:54:51.440,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/539. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.440,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",539,active,0} [ns_server:debug,2014-08-19T16:54:51.743,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 632. Nacking mccouch update. [views:debug,2014-08-19T16:54:51.743,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/632. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.743,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",632,active,0} [ns_server:debug,2014-08-19T16:54:51.743,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710,694, 678,662,646,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998,982, 966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702,670, 638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012,752, 720,688,656] [ns_server:debug,2014-08-19T16:54:51.776,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 520. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:51.776,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 537. Nacking mccouch update. [views:debug,2014-08-19T16:54:51.777,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/520. Updated state: active (0) [views:debug,2014-08-19T16:54:51.777,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/537. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.777,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",520,active,0} [ns_server:debug,2014-08-19T16:54:51.777,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",537,active,0} [ns_server:debug,2014-08-19T16:54:51.777,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,756,724,692,660,628,596,564,532,990,958,1022,762,730,698, 666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938,1002,742,710, 678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556,524,982,950, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752,720, 688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940, 1004,744,712,680,648,616,584,552,520,1023,978,946,1010,718,654,590,526,952, 1016] [ns_server:debug,2014-08-19T16:54:51.778,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,545,971,724,660,596,958,711,647,583,1022, 945,762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595, 957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735, 671,607,543,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007, 994,747,683,619,555,981,734,670,606,968,721,657,593,955,708,644,580,1019,942, 759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592, 954,707,643,579,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732, 668,604,966,719,655,591,953,706,642,578,1017,940,757,693,629,565,1004,991, 744,680,616,552,978,946,763,731,699,667,635,603,571,539,1010,965,718,654,590, 952,705,641,577,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730, 666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014, 754,690,626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,949, 766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663, 599,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739,675, 611,547,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751,687, 623,559,985,738,674,610,972,725,661,597,959,712,648,584,1023] [views:debug,2014-08-19T16:54:51.837,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/632. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.837,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",632,active,0} [views:debug,2014-08-19T16:54:51.920,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/520. Updated state: active (0) [views:debug,2014-08-19T16:54:51.920,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/537. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:51.920,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",520,active,0} [ns_server:debug,2014-08-19T16:54:51.921,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",537,active,0} [ns_server:debug,2014-08-19T16:54:52.196,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 535. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:52.196,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 630. Nacking mccouch update. [views:debug,2014-08-19T16:54:52.196,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/535. Updated state: active (0) [views:debug,2014-08-19T16:54:52.196,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/630. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:52.196,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",535,active,0} [ns_server:debug,2014-08-19T16:54:52.197,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",630,active,0} [ns_server:debug,2014-08-19T16:54:52.197,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,990,958,1022,762,730,698,666, 634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710,694, 678,662,646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998, 982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012, 752,720,688,656] [ns_server:debug,2014-08-19T16:54:52.197,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,545,971,724,660,596,958,711,647,583,1022, 945,762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595, 957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735, 671,607,543,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007, 994,747,683,619,555,981,734,670,606,968,721,657,593,955,708,644,580,1019,942, 759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592, 954,707,643,579,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732, 668,604,966,719,655,591,953,706,642,578,1017,940,757,693,629,565,1004,991, 744,680,616,552,978,946,763,731,699,667,635,603,571,539,1010,965,718,654,590, 952,705,641,577,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730, 666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014, 754,690,626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,949, 766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663, 599,535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739, 675,611,547,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751, 
687,623,559,985,738,674,610,972,725,661,597,959,712,648,584,1023] [ns_server:debug,2014-08-19T16:54:52.213,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 518. Nacking mccouch update. [views:debug,2014-08-19T16:54:52.213,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/518. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:52.213,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",518,active,0} [ns_server:debug,2014-08-19T16:54:52.213,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,756,724,692,660,628,596,564,532,990,958,1022,762,730,698, 666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938,1002,742,710, 678,646,614,582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752,720, 688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940, 1004,744,712,680,648,616,584,552,520,1023,978,946,1010,718,654,590,526,952, 1016] [views:debug,2014-08-19T16:54:52.322,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/630. Updated state: active (0) [views:debug,2014-08-19T16:54:52.322,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/535. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:52.322,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",630,active,0} [ns_server:debug,2014-08-19T16:54:52.322,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",535,active,0} [views:debug,2014-08-19T16:54:52.355,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/518. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:52.356,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",518,active,0} [ns_server:debug,2014-08-19T16:54:52.574,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 628. Nacking mccouch update. [views:debug,2014-08-19T16:54:52.574,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/628. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:52.574,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",628,active,0} [ns_server:debug,2014-08-19T16:54:52.575,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,628,990,958,1022,762,730,698, 666,634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710, 694,678,662,646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636, 998,982,966,950,1014,754,738,722,706,690,674,658,642,988,972,956,940,1020, 1004,760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734, 702,670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948, 1012,752,720,688,656] [ns_server:debug,2014-08-19T16:54:52.649,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 533. Nacking mccouch update. [views:debug,2014-08-19T16:54:52.650,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/533. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:52.650,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",533,active,0} [ns_server:debug,2014-08-19T16:54:52.650,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,545,971,724,660,596,958,711,647,583,1022, 945,762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595, 957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735, 671,607,543,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007, 994,747,683,619,555,981,734,670,606,968,721,657,593,955,708,644,580,1019,942, 759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592, 954,707,643,579,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732, 668,604,966,719,655,591,953,706,642,578,1017,940,757,693,629,565,1004,991, 744,680,616,552,978,946,763,731,699,667,635,603,571,539,1010,965,718,654,590, 952,705,641,577,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730, 666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014, 754,690,626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,949, 766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663, 599,535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739, 675,611,547,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751, 687,623,559,985,738,674,610,972,725,661,597,533,959,712,648,584,1023] [ns_server:debug,2014-08-19T16:54:52.685,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 516. Nacking mccouch update. [views:debug,2014-08-19T16:54:52.685,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/516. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:52.685,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",516,active,0} [ns_server:debug,2014-08-19T16:54:52.685,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,756,724,692,660,628,596,564,532,990,958,1022,762,730,698, 666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938,1002,742,710, 678,646,614,582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752, 720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992, 960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972, 940,1004,744,712,680,648,616,584,552,520,1023,978,946,1010,718,654,590,526, 952,1016] [views:debug,2014-08-19T16:54:52.725,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/628. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:52.725,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",628,active,0} [views:debug,2014-08-19T16:54:52.785,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/533. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:52.785,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",533,active,0} [views:debug,2014-08-19T16:54:52.809,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/516. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:52.809,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",516,active,0} [ns_server:debug,2014-08-19T16:54:53.093,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 626. Nacking mccouch update. [views:debug,2014-08-19T16:54:53.093,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/626. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:53.093,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",626,active,0} [ns_server:debug,2014-08-19T16:54:53.094,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,628,990,958,1022,762,730,698, 666,634,996,964,736,704,672,640,986,970,954,938,1018,1002,758,742,726,710, 694,678,662,646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636, 998,982,966,950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940, 1020,1004,760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766, 734,702,670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980, 948,1012,752,720,688,656] [ns_server:debug,2014-08-19T16:54:53.188,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 531. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:53.189,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 514. Nacking mccouch update. [views:debug,2014-08-19T16:54:53.189,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/531. Updated state: active (0) [views:debug,2014-08-19T16:54:53.189,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/514. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:53.189,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",531,active,0} [ns_server:debug,2014-08-19T16:54:53.189,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",514,active,0} [views:debug,2014-08-19T16:54:53.189,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/626. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:53.189,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",626,active,0} [ns_server:debug,2014-08-19T16:54:53.189,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,724,660,596,532,990,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,970,938,1002,742,710,678,646,614,582, 550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950,1014,754,722, 690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994,962, 766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612,580,548,516,974, 942,1006,746,714,682,650,618,586,554,522,980,948,1012,752,720,688,656,624, 592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960,764,732,700, 668,636,604,572,540,998,966,738,706,674,642,610,578,546,514,972,940,1004,744, 712,680,648,616,584,552,520,1023,978,946,1010,718,654,590,526,952,1016,756, 692,628,564] [ns_server:debug,2014-08-19T16:54:53.190,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,545,971,724,660,596,958,711,647,583,1022, 945,762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595, 531,957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982, 735,671,607,543,969,722,658,594,956,709,645,581,1020,943,760,696,632,568, 1007,994,747,683,619,555,981,734,670,606,968,721,657,593,955,708,644,580, 1019,942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967, 720,656,592,954,707,643,579,1018,941,758,694,630,566,1005,992,745,681,617, 553,979,732,668,604,966,719,655,591,953,706,642,578,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,946,763,731,699,667,635,603,571,539,1010,965, 718,654,590,952,705,641,577,1016,939,756,692,628,564,1003,990,743,679,615, 551,977,730,666,602,964,717,653,589,951,704,640,576,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,537,963,716,652,588,950,767,703,639, 575,1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,962,715, 651,587,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548, 974,727,663,599,535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624, 560,986,739,675,611,547,973,726,662,598,960,713,649,585,947,764,700,636,572, 1011,998,751,687,623,559,985,738,674,610,972,725,661,597,533,959,712,648,584, 1023] [views:debug,2014-08-19T16:54:53.306,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/531. Updated state: active (0) [views:debug,2014-08-19T16:54:53.306,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/514. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:53.306,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",531,active,0} [ns_server:debug,2014-08-19T16:54:53.307,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",514,active,0} [ns_server:debug,2014-08-19T16:54:53.441,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 624. Nacking mccouch update. 
[views:debug,2014-08-19T16:54:53.441,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/624. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:53.441,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",624,active,0} [ns_server:debug,2014-08-19T16:54:53.442,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,984,952,1016,756,724,692,660,628,990,958,1022,762,730,698, 666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662,646, 630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702,670,638, 968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012,752,720, 688,656,624,986,954,1018] [views:debug,2014-08-19T16:54:53.507,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/624. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:53.507,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",624,active,0} [ns_server:debug,2014-08-19T16:54:53.599,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 512. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:53.599,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 529. Nacking mccouch update. [views:debug,2014-08-19T16:54:53.599,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/512. Updated state: active (0) [views:debug,2014-08-19T16:54:53.599,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/529. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:53.599,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",512,active,0} [ns_server:debug,2014-08-19T16:54:53.599,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",529,active,0} [ns_server:debug,2014-08-19T16:54:53.600,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,724,660,596,532,990,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646,614, 582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950,1014,754, 722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994, 962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612,580,548,516, 974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752,720,688,656, 624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960,764,732, 700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,514,972,940,1004, 744,712,680,648,616,584,552,520,1023,978,946,1010,718,654,590,526,952,1016, 756,692,628,564] [ns_server:debug,2014-08-19T16:54:53.600,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,545,971,724,660,596,958,711,647,583,1022, 945,762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595, 531,957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982, 735,671,607,543,969,722,658,594,956,709,645,581,1020,943,760,696,632,568, 1007,994,747,683,619,555,981,734,670,606,968,721,657,593,529,955,708,644,580, 1019,942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967, 720,656,592,954,707,643,579,1018,941,758,694,630,566,1005,992,745,681,617, 553,979,732,668,604,966,719,655,591,953,706,642,578,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641,577, 1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,964,717, 653,589,951,704,640,576,1015,938,755,691,627,563,1002,989,742,678,614,550, 976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014,754,690,626,562, 1001,988,741,677,613,549,975,728,664,600,962,715,651,587,949,766,702,638,574, 1013,753,689,625,561,1000,987,740,676,612,548,974,727,663,599,535,961,714, 650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739,675,611,547,973, 726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751,687,623,559,985, 738,674,610,972,725,661,597,533,959,712,648,584,1023,946,763,699,635,571, 1010] [views:debug,2014-08-19T16:54:53.700,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/529. Updated state: active (0) [views:debug,2014-08-19T16:54:53.700,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/512. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:53.700,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",529,active,0} [ns_server:debug,2014-08-19T16:54:53.700,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",512,active,0} [ns_server:info,2014-08-19T16:54:53.797,ns_1@10.242.238.90:ns_config_rep<0.17414.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.91' [ns_server:debug,2014-08-19T16:54:53.933,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 622. Nacking mccouch update. [views:debug,2014-08-19T16:54:53.934,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/622. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:53.934,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",622,active,0} [ns_server:debug,2014-08-19T16:54:53.934,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662, 646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,998,982,966, 950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702,670, 638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012,752, 720,688,656,624,986,954,1018] [views:debug,2014-08-19T16:54:54.044,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/622. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:54.044,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",622,active,0} [ns_server:debug,2014-08-19T16:54:54.186,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 527. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:54.186,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 426. Nacking mccouch update. [views:debug,2014-08-19T16:54:54.186,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/527. Updated state: active (0) [views:debug,2014-08-19T16:54:54.186,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/426. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:54.186,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",527,active,0} [ns_server:debug,2014-08-19T16:54:54.186,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",426,replica,0} [ns_server:debug,2014-08-19T16:54:54.187,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,990,958,1022,762,730,698,666,634,602, 570,538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646, 614,582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950,1014, 754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536, 994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612,580,548, 516,974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752,720,688, 656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960,764, 732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,514,972,940, 1004,744,712,680,648,616,584,552,520,1023,978,946,1010,718,654,590,526,952, 1016,756,692,628,564] [ns_server:debug,2014-08-19T16:54:54.187,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,545,971,724,660,596,958,711,647,583,1022, 945,762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595, 531,957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982, 735,671,607,543,969,722,658,594,956,709,645,581,1020,943,760,696,632,568, 1007,994,747,683,619,555,981,734,670,606,968,721,657,593,529,955,708,644,580, 1019,942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967, 720,656,592,954,707,643,579,1018,941,758,694,630,566,1005,992,745,681,617, 553,979,732,668,604,966,719,655,591,527,953,706,642,578,1017,940,757,693,629, 565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641, 577,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,964, 717,653,589,951,704,640,576,1015,938,755,691,627,563,1002,989,742,678,614, 550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014,754,690,626, 562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,949,766,702,638, 574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663,599,535,961, 714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739,675,611,547, 973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751,687,623,559, 985,738,674,610,972,725,661,597,533,959,712,648,584,1023,946,763,699,635,571, 1010] [ns_server:debug,2014-08-19T16:54:54.286,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 620. Nacking mccouch update. [views:debug,2014-08-19T16:54:54.287,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/620. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:54.287,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",620,active,0} [views:debug,2014-08-19T16:54:54.287,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/527. 
Updated state: active (0) [views:debug,2014-08-19T16:54:54.287,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/426. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:54.287,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",527,active,0} [ns_server:debug,2014-08-19T16:54:54.287,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",426,replica,0} [ns_server:debug,2014-08-19T16:54:54.287,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662, 646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982, 966,950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,980,948,1012, 752,720,688,656,624,986,954,1018] [views:debug,2014-08-19T16:54:54.414,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/620. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:54.414,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",620,active,0} [ns_server:debug,2014-08-19T16:54:54.577,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 424. Nacking mccouch update. [views:debug,2014-08-19T16:54:54.597,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/424. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:54.598,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",424,replica,0} [ns_server:debug,2014-08-19T16:54:54.598,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 525. Nacking mccouch update. [views:debug,2014-08-19T16:54:54.598,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/525. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:54.598,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",525,active,0} [ns_server:debug,2014-08-19T16:54:54.598,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,990,958,1022,762,730,698,666,634,602, 570,538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646, 614,582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950,424, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752, 720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992, 960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,514, 972,940,1004,744,712,680,648,616,584,552,520,1023,978,946,1010,718,654,590, 526,952,1016,756,692,628,564] [ns_server:debug,2014-08-19T16:54:54.599,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,545,971,724,660,596,958,711,647,583,1022, 945,762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595, 531,957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982, 735,671,607,543,969,722,658,594,956,709,645,581,1020,943,760,696,632,568, 1007,994,747,683,619,555,981,734,670,606,968,721,657,593,529,955,708,644,580, 1019,942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967, 720,656,592,954,707,643,579,1018,941,758,694,630,566,1005,992,745,681,617, 553,979,732,668,604,966,719,655,591,527,953,706,642,578,1017,940,757,693,629, 565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641, 577,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,964, 717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989,742,678, 614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014,754,690, 626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,949,766,702, 638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663,599,535, 961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739,675,611, 547,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751,687,623, 559,985,738,674,610,972,725,661,597,533,959,712,648,584,1023,946,763,699,635, 571,1010] [ns_server:debug,2014-08-19T16:54:54.631,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 618. Nacking mccouch update. [views:debug,2014-08-19T16:54:54.631,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/618. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:54.631,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",618,active,0} [ns_server:debug,2014-08-19T16:54:54.632,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662, 646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982, 966,950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,632,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,618,980,948, 1012,752,720,688,656,624,986,954,1018] [views:debug,2014-08-19T16:54:54.699,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/424. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:54.699,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",424,replica,0} [views:debug,2014-08-19T16:54:54.699,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/525. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:54.699,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",525,active,0} [views:debug,2014-08-19T16:54:54.759,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/618. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:54.759,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",618,active,0} [ns_server:debug,2014-08-19T16:54:54.918,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 523. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:54.918,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 422. Nacking mccouch update. [views:debug,2014-08-19T16:54:54.918,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/523. Updated state: active (0) [views:debug,2014-08-19T16:54:54.919,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/422. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:54.919,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",523,active,0} [ns_server:debug,2014-08-19T16:54:54.919,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",422,replica,0} [ns_server:debug,2014-08-19T16:54:54.919,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,990,958,1022,762,730,698,666,634,602, 570,538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646, 614,582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950,424, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534, 992,960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546, 514,972,940,1004,744,712,680,648,616,584,552,520,1023,978,946,1010,718,654, 590,526,952,1016,756,692,628,564] [ns_server:debug,2014-08-19T16:54:54.919,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,545,971,724,660,596,958,711,647,583,1022, 945,762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595, 531,957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982, 735,671,607,543,969,722,658,594,956,709,645,581,1020,943,760,696,632,568, 1007,994,747,683,619,555,981,734,670,606,968,721,657,593,529,955,708,644,580, 1019,942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967, 720,656,592,954,707,643,579,1018,941,758,694,630,566,1005,992,745,681,617, 553,979,732,668,604,966,719,655,591,527,953,706,642,578,1017,940,757,693,629, 565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641, 577,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,964, 717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989,742,678, 614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014,754,690, 626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,523,949,766, 702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663,599, 535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739,675, 611,547,973,726,662,598,960,713,649,585,947,764,700,636,572,1011,998,751,687, 623,559,985,738,674,610,972,725,661,597,533,959,712,648,584,1023,946,763,699, 635,571,1010] [ns_server:debug,2014-08-19T16:54:54.935,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 616. Nacking mccouch update. [views:debug,2014-08-19T16:54:54.935,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/616. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:54.935,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",616,active,0} [ns_server:debug,2014-08-19T16:54:54.936,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,758,742,726,710,694,678,662, 646,630,992,976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982, 966,950,1014,754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004, 760,744,728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734, 702,670,638,968,1000,740,708,676,644,974,942,1006,746,714,682,650,618,980, 948,1012,752,720,688,656,624,986,954,1018] [views:debug,2014-08-19T16:54:55.153,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/422. Updated state: replica (0) [views:debug,2014-08-19T16:54:55.153,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/523. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:55.153,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",523,active,0} [ns_server:debug,2014-08-19T16:54:55.153,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",422,replica,0} [views:debug,2014-08-19T16:54:55.203,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/616. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:55.203,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",616,active,0} [ns_server:debug,2014-08-19T16:54:55.371,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 420. Nacking mccouch update. [views:debug,2014-08-19T16:54:55.372,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/420. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:55.372,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",420,replica,0} [ns_server:debug,2014-08-19T16:54:55.372,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646,614, 582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950,424,1014, 754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536, 994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612,580,548, 516,974,942,1006,746,714,682,650,618,586,554,522,980,948,422,1012,752,720, 688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,514,972, 940,1004,744,712,680,648,616,584,552,520,1023,978,946,420,1010,718,654,590, 526,952,1016,756,692,628,564,990] [ns_server:debug,2014-08-19T16:54:55.406,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 614. Nacking mccouch update. [views:debug,2014-08-19T16:54:55.406,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/614. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:55.406,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",614,active,0} [ns_server:debug,2014-08-19T16:54:55.406,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,742,710,678,646,614,992,976, 960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950,1014, 754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702,670,638, 968,1000,740,708,676,644,974,942,1006,746,714,682,650,618,980,948,1012,752, 720,688,656,624,986,954,1018,758,726,694,662,630] [ns_server:debug,2014-08-19T16:54:55.448,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 521. Nacking mccouch update. [views:debug,2014-08-19T16:54:55.448,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/521. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:55.449,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",521,active,0} [ns_server:debug,2014-08-19T16:54:55.449,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,984,737,673,609,545,971,724,660,596,958,711,647,583,1022, 945,762,698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595, 531,957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982, 735,671,607,543,969,722,658,594,956,709,645,581,1020,943,760,696,632,568, 1007,994,747,683,619,555,981,734,670,606,968,721,657,593,529,955,708,644,580, 1019,942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967, 720,656,592,954,707,643,579,1018,941,758,694,630,566,1005,992,745,681,617, 553,979,732,668,604,966,719,655,591,527,953,706,642,578,1017,940,757,693,629, 565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641, 577,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,964, 717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989,742,678, 614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014,754,690, 626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,523,949,766, 702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663,599, 535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739,675, 611,547,973,726,662,598,960,713,649,585,521,947,764,700,636,572,1011,998,751, 687,623,559,985,738,674,610,972,725,661,597,533,959,712,648,584,1023,946,763, 699,635,571,1010] [views:debug,2014-08-19T16:54:55.516,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/420. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:55.516,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",420,replica,0} [views:debug,2014-08-19T16:54:55.532,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/614. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:55.532,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",614,active,0} [views:debug,2014-08-19T16:54:55.549,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/521. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:55.550,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",521,active,0} [ns_server:debug,2014-08-19T16:54:55.775,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 418. Nacking mccouch update. [views:debug,2014-08-19T16:54:55.775,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/418. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:55.775,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",418,replica,0} [ns_server:debug,2014-08-19T16:54:55.776,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646,614, 582,550,518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534, 992,960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546, 514,972,940,1004,744,712,680,648,616,584,552,520,1023,978,946,420,1010,718, 654,590,526,952,1016,756,692,628,564,990] [ns_server:debug,2014-08-19T16:54:55.792,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 612. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:55.792,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 519. Nacking mccouch update. [views:debug,2014-08-19T16:54:55.792,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/612. Updated state: active (0) [views:debug,2014-08-19T16:54:55.792,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/519. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:55.792,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",612,active,0} [ns_server:debug,2014-08-19T16:54:55.792,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",519,active,0} [ns_server:debug,2014-08-19T16:54:55.793,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,742,710,678,646,614,992,976, 960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950,1014, 754,738,722,706,690,674,658,642,626,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702,670,638, 968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948,1012, 752,720,688,656,624,986,954,1018,758,726,694,662,630] [ns_server:debug,2014-08-19T16:54:55.793,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,737,673,609,545,971,724,660,596,958,711,647,583,519,1022,945,762, 698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595,531,957, 710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671, 607,543,969,722,658,594,956,709,645,581,1020,943,760,696,632,568,1007,994, 747,683,619,555,981,734,670,606,968,721,657,593,529,955,708,644,580,1019,942, 759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592, 954,707,643,579,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732, 668,604,966,719,655,591,527,953,706,642,578,1017,940,757,693,629,565,1004, 991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641,577,1016, 939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,964,717,653,589, 525,951,704,640,576,1015,938,755,691,627,563,1002,989,742,678,614,550,976, 729,665,601,537,963,716,652,588,950,767,703,639,575,1014,754,690,626,562, 1001,988,741,677,613,549,975,728,664,600,962,715,651,587,523,949,766,702,638, 574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663,599,535,961, 714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739,675,611,547, 973,726,662,598,960,713,649,585,521,947,764,700,636,572,1011,998,751,687,623, 559,985,738,674,610,972,725,661,597,533,959,712,648,584,1023,946,763,699,635, 571,1010,997,686,558] [views:debug,2014-08-19T16:54:55.859,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/418. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:55.860,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",418,replica,0} [views:debug,2014-08-19T16:54:55.893,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/612. Updated state: active (0) [views:debug,2014-08-19T16:54:55.893,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/519. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:55.893,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",612,active,0} [ns_server:debug,2014-08-19T16:54:55.893,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",519,active,0} [ns_server:debug,2014-08-19T16:54:56.164,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 610. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:56.164,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 416. Nacking mccouch update. [views:debug,2014-08-19T16:54:56.164,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/610. Updated state: active (0) [views:debug,2014-08-19T16:54:56.164,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/416. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:56.164,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",610,active,0} [ns_server:debug,2014-08-19T16:54:56.164,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",416,replica,0} [ns_server:debug,2014-08-19T16:54:56.164,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,970,938,1002,742,710,678,646,614,992,976, 960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950,1014, 754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702,670, 638,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948, 1012,752,720,688,656,624,986,954,1018,758,726,694,662,630] [ns_server:debug,2014-08-19T16:54:56.165,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646,614, 582,550,518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422, 1012,752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598, 566,534,992,960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610, 578,546,514,972,940,1004,744,712,680,648,616,584,552,520,1023,978,946,420, 1010,718,654,590,526,952,1016,756,692,628,564,990] [ns_server:debug,2014-08-19T16:54:56.331,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 517. Nacking mccouch update. [views:debug,2014-08-19T16:54:56.331,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/517. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:56.331,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",517,active,0} [ns_server:debug,2014-08-19T16:54:56.332,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,737,673,609,545,971,724,660,596,958,711,647,583,519,1022,945,762, 698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595,531,957, 710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671, 607,543,969,722,658,594,956,709,645,581,517,1020,943,760,696,632,568,1007, 994,747,683,619,555,981,734,670,606,968,721,657,593,529,955,708,644,580,1019, 942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656, 592,954,707,643,579,1018,941,758,694,630,566,1005,992,745,681,617,553,979, 732,668,604,966,719,655,591,527,953,706,642,578,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641,577, 1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,964,717, 653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989,742,678,614, 550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014,754,690,626, 562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,523,949,766,702, 638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663,599,535, 961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739,675,611, 547,973,726,662,598,960,713,649,585,521,947,764,700,636,572,1011,998,751,687, 623,559,985,738,674,610,972,725,661,597,533,959,712,648,584,1023,946,763,699, 635,571,1010,997,686,558] [views:debug,2014-08-19T16:54:56.382,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/416. Updated state: replica (0) [views:debug,2014-08-19T16:54:56.382,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/610. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:56.382,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",416,replica,0} [ns_server:debug,2014-08-19T16:54:56.382,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",610,active,0} [views:debug,2014-08-19T16:54:56.398,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/517. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:56.398,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",517,active,0} [ns_server:debug,2014-08-19T16:54:56.532,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 414. Nacking mccouch update. [views:debug,2014-08-19T16:54:56.533,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/414. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:56.533,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 608. Nacking mccouch update. 
[ns_server:debug,2014-08-19T16:54:56.533,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",414,replica,0} [views:debug,2014-08-19T16:54:56.533,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/608. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:56.533,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",608,active,0} [ns_server:debug,2014-08-19T16:54:56.533,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646,614, 582,550,518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422, 1012,752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598, 566,534,992,960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,1010,718,654,590,526,952,1016,756,692,628,564,990] [ns_server:debug,2014-08-19T16:54:56.533,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614,992, 976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702, 670,638,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980, 948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630] [ns_server:debug,2014-08-19T16:54:56.549,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 515. Nacking mccouch update. [views:debug,2014-08-19T16:54:56.549,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/515. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:56.550,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",515,active,0} [ns_server:debug,2014-08-19T16:54:56.550,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,737,673,609,545,971,724,660,596,958,711,647,583,519,1022,945,762, 698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595,531,957, 710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671, 607,543,969,722,658,594,956,709,645,581,517,1020,943,760,696,632,568,1007, 994,747,683,619,555,981,734,670,606,968,721,657,593,529,955,708,644,580,1019, 942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656, 592,954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553, 979,732,668,604,966,719,655,591,527,953,706,642,578,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641,577, 1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,964,717, 653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989,742,678,614, 550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014,754,690,626, 562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,523,949,766,702, 638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663,599,535, 961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739,675,611, 547,973,726,662,598,960,713,649,585,521,947,764,700,636,572,1011,998,751,687, 623,559,985,738,674,610,972,725,661,597,533,959,712,648,584,1023,946,763,699, 635,571,1010,997,686,558] [views:debug,2014-08-19T16:54:56.567,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/414. Updated state: replica (0) [views:debug,2014-08-19T16:54:56.567,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/608. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:56.567,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",414,replica,0} [ns_server:debug,2014-08-19T16:54:56.567,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",608,active,0} [views:debug,2014-08-19T16:54:56.650,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/515. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:56.650,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",515,active,0} [ns_server:debug,2014-08-19T16:54:56.878,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 412. Nacking mccouch update. [views:debug,2014-08-19T16:54:56.878,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/412. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:56.878,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",412,replica,0} [ns_server:debug,2014-08-19T16:54:56.878,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646, 614,582,550,518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422, 1012,752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598, 566,534,992,960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,1010,718,654,590,526,952,1016,756,692,628,564,990] [ns_server:debug,2014-08-19T16:54:56.919,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 606. Nacking mccouch update. [views:debug,2014-08-19T16:54:56.920,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/606. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:56.920,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",606,active,0} [ns_server:debug,2014-08-19T16:54:56.920,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614,992, 976,960,944,1008,764,748,732,716,700,684,668,652,636,620,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702, 670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618, 980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630] [ns_server:debug,2014-08-19T16:54:56.953,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 513. Nacking mccouch update. [views:debug,2014-08-19T16:54:56.953,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/513. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:56.953,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",513,active,0} [ns_server:debug,2014-08-19T16:54:56.954,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,737,673,609,545,971,724,660,596,958,711,647,583,519,1022,945,762, 698,634,570,1009,996,749,685,621,557,983,736,672,608,970,723,659,595,531,957, 710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671, 607,543,969,722,658,594,956,709,645,581,517,1020,943,760,696,632,568,1007, 994,747,683,619,555,981,734,670,606,968,721,657,593,529,955,708,644,580,1019, 942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656, 592,954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553, 979,732,668,604,966,719,655,591,527,953,706,642,578,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641,577, 513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,964, 717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989,742,678, 614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014,754,690, 626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,523,949,766, 702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663,599, 535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739,675, 611,547,973,726,662,598,960,713,649,585,521,947,764,700,636,572,1011,998,751, 687,623,559,985,738,674,610,972,725,661,597,533,959,712,648,584,1023,946,763, 699,635,571,1010,997,686,558] [views:debug,2014-08-19T16:54:57.020,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/412. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:57.021,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",412,replica,0} [views:debug,2014-08-19T16:54:57.062,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/606. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:57.062,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",606,active,0} [views:debug,2014-08-19T16:54:57.079,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/513. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:57.079,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",513,active,0} [ns_server:debug,2014-08-19T16:54:57.198,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 410. Nacking mccouch update. [views:debug,2014-08-19T16:54:57.198,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/410. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:57.198,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",410,replica,0} [ns_server:debug,2014-08-19T16:54:57.198,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,698,634,570,996,964,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754,722, 690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994,962, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012,752,720, 688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,514,972, 940,414,1004,744,712,680,648,616,584,552,520,1023,978,946,420,1010,718,654, 590,526,952,1016,756,692,628,564,990,730,666,602,538] [ns_server:debug,2014-08-19T16:54:57.356,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 604. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:57.356,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 425. Nacking mccouch update. [views:debug,2014-08-19T16:54:57.356,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/604. Updated state: active (0) [views:debug,2014-08-19T16:54:57.356,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/425. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:57.356,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",604,active,0} [ns_server:debug,2014-08-19T16:54:57.356,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",425,replica,0} [ns_server:debug,2014-08-19T16:54:57.357,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614,976, 944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950,1014, 754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980, 948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,992,960] [ns_server:debug,2014-08-19T16:54:57.357,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,737,673,609,545,971,724,660,596,958,711,647,583,519,1022,945,762, 698,634,570,1009,996,749,685,621,557,983,736,672,608,425,970,723,659,595,531, 957,710,646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735, 671,607,543,969,722,658,594,956,709,645,581,517,1020,943,760,696,632,568, 1007,994,747,683,619,555,981,734,670,606,968,721,657,593,529,955,708,644,580, 1019,942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967, 720,656,592,954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681, 617,553,979,732,668,604,966,719,655,591,527,953,706,642,578,1017,940,757,693, 629,565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705, 641,577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666, 602,964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014, 754,690,626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,523, 949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727, 663,599,535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986, 739,675,611,547,973,726,662,598,960,713,649,585,521,947,764,700,636,572,1011, 998,751,687,623,559,985,738,674,610,972,725,661,597,533,959,712,648,584,1023, 946,763,699,635,571,1010,997,686,558] [views:debug,2014-08-19T16:54:57.390,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/410. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:57.390,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",410,replica,0} [views:debug,2014-08-19T16:54:57.459,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/604. Updated state: active (0) [views:debug,2014-08-19T16:54:57.459,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/425. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:57.460,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",604,active,0} [ns_server:debug,2014-08-19T16:54:57.460,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",425,replica,0} [ns_server:debug,2014-08-19T16:54:57.610,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 408. Nacking mccouch update. [views:debug,2014-08-19T16:54:57.610,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/408. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:57.610,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",408,replica,0} [ns_server:debug,2014-08-19T16:54:57.610,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,698,634,570,996,964,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754,722, 690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994,962, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012,752,720, 688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610,578,546,514, 972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946,420,1010,718, 654,590,526,952,1016,756,692,628,564,990,730,666,602,538] [views:debug,2014-08-19T16:54:57.702,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/408. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:57.702,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",408,replica,0} [ns_server:debug,2014-08-19T16:54:57.735,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 423. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:57.735,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 602. Nacking mccouch update. [views:debug,2014-08-19T16:54:57.735,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/423. Updated state: replica (0) [views:debug,2014-08-19T16:54:57.735,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/602. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:57.735,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",423,replica,0} [ns_server:debug,2014-08-19T16:54:57.736,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",602,active,0} [ns_server:debug,2014-08-19T16:54:57.736,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614, 976,944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,1023,994,978,962,946,1010,766,734,702, 670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618, 980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,992,960] [ns_server:debug,2014-08-19T16:54:57.736,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,971,724,660,596,958,711,647,583,519,1022,945,762,698,634, 570,1009,996,749,685,621,557,983,736,672,608,425,970,723,659,595,531,957,710, 646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607, 543,969,722,658,594,956,709,645,581,517,1020,943,760,696,632,568,1007,994, 747,683,619,555,981,734,670,606,423,968,721,657,593,529,955,708,644,580,1019, 942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656, 592,954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553, 979,732,668,604,966,719,655,591,527,953,706,642,578,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641,577, 513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,964, 717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989,742,678, 614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014,754,690, 626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,523,949,766, 702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663,599, 535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739,675, 611,547,973,726,662,598,960,713,649,585,521,947,764,700,636,572,1011,998,751, 687,623,559,985,738,674,610,972,725,661,597,533,959,712,648,584,1023,946,763, 699,635,571,1010,997,686,558,737,609] [views:debug,2014-08-19T16:54:57.836,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/602. Updated state: active (0) [views:debug,2014-08-19T16:54:57.837,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/423. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:57.837,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",602,active,0} [ns_server:debug,2014-08-19T16:54:57.837,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",423,replica,0} [ns_server:debug,2014-08-19T16:54:58.078,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 406. Nacking mccouch update. 
[views:debug,2014-08-19T16:54:58.078,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/406. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:58.079,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",406,replica,0} [ns_server:debug,2014-08-19T16:54:58.079,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,698,634,570,996,964,406, 736,704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550, 518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754, 722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994, 962,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548, 516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012,752, 720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992, 960,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610,578,546, 514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946,420,1010, 718,654,590,526,952,1016,756,692,628,564,990,730,666,602,538] [views:debug,2014-08-19T16:54:58.112,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/406. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:58.113,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",406,replica,0} [ns_server:debug,2014-08-19T16:54:58.232,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 600. Nacking mccouch update. [ns_server:debug,2014-08-19T16:54:58.232,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 421. Nacking mccouch update. [views:debug,2014-08-19T16:54:58.232,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/600. Updated state: active (0) [views:debug,2014-08-19T16:54:58.232,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/421. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:58.232,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",600,active,0} [ns_server:debug,2014-08-19T16:54:58.232,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",421,replica,0} [ns_server:debug,2014-08-19T16:54:58.233,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614, 976,944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734, 702,670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650, 618,980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,992, 960] [ns_server:debug,2014-08-19T16:54:58.233,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,971,724,660,596,958,711,647,583,519,1022,945,762,698,634, 570,1009,996,749,685,621,557,983,736,672,608,425,970,723,659,595,531,957,710, 646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607, 543,969,722,658,594,956,709,645,581,517,1020,943,760,696,632,568,1007,994, 747,683,619,555,981,734,670,606,423,968,721,657,593,529,955,708,644,580,1019, 942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656, 592,954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553, 979,732,668,604,421,966,719,655,591,527,953,706,642,578,1017,940,757,693,629, 565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641, 577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602, 964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989,742, 678,614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014,754, 690,626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,523,949, 766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727,663, 599,535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986,739, 675,611,547,973,726,662,598,960,713,649,585,521,947,764,700,636,572,1011,998, 751,687,623,559,985,738,674,610,972,725,661,597,533,959,712,648,584,1023,946, 763,699,635,571,1010,997,686,558,737,609] [views:debug,2014-08-19T16:54:58.308,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/600. Updated state: active (0) [views:debug,2014-08-19T16:54:58.308,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/421. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:58.308,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",600,active,0} [ns_server:debug,2014-08-19T16:54:58.308,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",421,replica,0} [ns_server:debug,2014-08-19T16:54:58.420,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 404. Nacking mccouch update. 
[views:debug,2014-08-19T16:54:58.420,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/404. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:58.420,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",404,replica,0} [ns_server:debug,2014-08-19T16:54:58.420,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,698,634,570,996,964,406, 736,704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550, 518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754, 722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994, 962,404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580, 548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534, 992,960,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610,578, 546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946,420, 1010,718,654,590,526,952,1016,756,692,628,564,990,730,666,602,538] [views:debug,2014-08-19T16:54:58.509,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/404. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:58.509,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",404,replica,0} [ns_server:debug,2014-08-19T16:54:58.618,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 598. Nacking mccouch update. [views:debug,2014-08-19T16:54:58.618,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/598. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:58.618,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",598,active,0} [ns_server:debug,2014-08-19T16:54:58.618,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,990,958,1022,762,730, 698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646,614, 976,944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734, 702,670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650, 618,980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,598, 992,960] [ns_server:debug,2014-08-19T16:54:58.634,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 419. Nacking mccouch update. [views:debug,2014-08-19T16:54:58.634,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/419. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:58.635,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",419,replica,0} [ns_server:debug,2014-08-19T16:54:58.635,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,971,724,660,596,958,711,647,583,519,1022,945,762,698,634, 570,1009,996,749,685,621,557,983,736,672,608,425,970,723,659,595,531,957,710, 646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607, 543,969,722,658,594,956,709,645,581,517,1020,943,760,696,632,568,1007,994, 747,683,619,555,981,734,670,606,423,968,721,657,593,529,955,708,644,580,1019, 942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656, 592,954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553, 979,732,668,604,421,966,719,655,591,527,953,706,642,578,1017,940,757,693,629, 565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641, 577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602, 419,964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014, 754,690,626,562,1001,988,741,677,613,549,975,728,664,600,962,715,651,587,523, 949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727, 663,599,535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986, 739,675,611,547,973,726,662,598,960,713,649,585,521,947,764,700,636,572,1011, 998,751,687,623,559,985,738,674,610,972,725,661,597,533,959,712,648,584,1023, 946,763,699,635,571,1010,997,686,558,737,609] [views:debug,2014-08-19T16:54:58.702,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/598. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:58.702,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",598,active,0} [views:debug,2014-08-19T16:54:58.727,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/419. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:58.727,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",419,replica,0} [ns_server:debug,2014-08-19T16:54:58.760,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 402. Nacking mccouch update. [views:debug,2014-08-19T16:54:58.760,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/402. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:58.760,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",402,replica,0} [ns_server:debug,2014-08-19T16:54:58.761,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,698,634,570,996,964,406, 736,704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550, 518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754, 722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994, 962,404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580, 548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534, 992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,1010,718,654,590,526,952,1016,756,692,628,564,990,730,666,602,538] [views:debug,2014-08-19T16:54:58.839,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/402. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:58.839,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",402,replica,0} [ns_server:debug,2014-08-19T16:54:58.964,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 596. Nacking mccouch update. [views:debug,2014-08-19T16:54:58.964,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/596. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:58.965,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",596,active,0} [ns_server:debug,2014-08-19T16:54:58.965,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,596,990,958,1022,762, 730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646, 614,976,944,1008,764,748,732,716,700,684,668,652,636,620,604,998,982,966,950, 1014,754,738,722,706,690,674,658,642,626,610,988,972,956,940,1020,1004,760, 744,728,712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734, 702,670,638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650, 618,980,948,1012,752,720,688,656,624,986,954,1018,758,726,694,662,630,598, 992,960] [ns_server:debug,2014-08-19T16:54:59.115,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 417. Nacking mccouch update. [views:debug,2014-08-19T16:54:59.115,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/417. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:59.115,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",417,replica,0} [ns_server:debug,2014-08-19T16:54:59.116,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,971,724,660,596,958,711,647,583,519,1022,945,762,698,634, 570,1009,996,749,685,621,557,983,736,672,608,425,970,723,659,595,531,957,710, 646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607, 543,969,722,658,594,956,709,645,581,517,1020,943,760,696,632,568,1007,994, 747,683,619,555,981,734,670,606,423,968,721,657,593,529,955,708,644,580,1019, 942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656, 592,954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553, 979,732,668,604,421,966,719,655,591,527,953,706,642,578,1017,940,757,693,629, 565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641, 577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602, 419,964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014, 754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962,715,651,587, 523,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974, 727,663,599,535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560, 986,739,675,611,547,973,726,662,598,960,713,649,585,521,947,764,700,636,572, 1011,998,751,687,623,559,985,738,674,610,972,725,661,597,533,959,712,648,584, 1023,946,763,699,635,571,1010,997,686,558,737,609] [ns_server:debug,2014-08-19T16:54:59.215,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 400. Nacking mccouch update. [views:debug,2014-08-19T16:54:59.215,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/400. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:59.215,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",400,replica,0} [views:debug,2014-08-19T16:54:59.215,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/596. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:54:59.215,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",596,active,0} [ns_server:debug,2014-08-19T16:54:59.216,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754,722, 690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994,962, 404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548, 516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012,752, 720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992, 960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610,578, 546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946,420, 1010,718,654,590,526,952,1016,756,692,628,564,990,730,666,602,538,964,406] [views:debug,2014-08-19T16:54:59.257,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/417. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:59.257,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",417,replica,0} [views:debug,2014-08-19T16:54:59.368,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/400. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:59.368,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",400,replica,0} [ns_server:debug,2014-08-19T16:54:59.450,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 594. Nacking mccouch update. [views:debug,2014-08-19T16:54:59.450,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/594. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:59.450,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",594,active,0} [ns_server:debug,2014-08-19T16:54:59.450,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,596,990,958,1022,762, 730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646, 614,976,944,1008,748,716,684,652,620,998,982,966,950,1014,754,738,722,706, 690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712,696, 680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670,638,606, 968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948,1012, 752,720,688,656,624,986,954,1018,758,726,694,662,630,598,992,960,764,732,700, 668,636,604] [ns_server:debug,2014-08-19T16:54:59.467,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 415. Nacking mccouch update. [views:debug,2014-08-19T16:54:59.467,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/415. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:59.467,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",415,replica,0} [ns_server:debug,2014-08-19T16:54:59.468,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,971,724,660,596,958,711,647,583,519,1022,945,762,698,634, 570,1009,996,749,685,621,557,983,736,672,608,425,970,723,659,595,531,957,710, 646,582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607, 543,969,722,658,594,956,709,645,581,517,1020,943,760,696,632,568,1007,994, 747,683,619,555,981,734,670,606,423,968,721,657,593,529,955,708,644,580,1019, 942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656, 592,954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553, 979,732,668,604,421,966,719,655,591,527,953,706,642,578,1017,940,757,693,629, 565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641, 577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602, 419,964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014, 754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962,715,651,587, 523,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974, 727,663,599,535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560, 986,739,675,611,547,973,726,662,598,415,960,713,649,585,521,947,764,700,636, 572,1011,998,751,687,623,559,985,738,674,610,972,725,661,597,533,959,712,648, 584,1023,946,763,699,635,571,1010,997,686,558,737,609] [views:debug,2014-08-19T16:54:59.544,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/594. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:59.544,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",594,active,0} [views:debug,2014-08-19T16:54:59.586,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/415. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:59.586,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",415,replica,0} [ns_server:debug,2014-08-19T16:54:59.602,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 398. Nacking mccouch update. [views:debug,2014-08-19T16:54:59.603,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/398. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:59.603,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",398,replica,0} [ns_server:debug,2014-08-19T16:54:59.603,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754,722, 690,658,626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994, 962,404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580, 548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534, 992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,1010,718,654,590,526,952,1016,756,692,628,564,990,730,666,602,538,964, 406] [views:debug,2014-08-19T16:54:59.745,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/398. Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:59.745,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",398,replica,0} [ns_server:debug,2014-08-19T16:54:59.887,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 592. Nacking mccouch update. [views:debug,2014-08-19T16:54:59.887,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/592. Updated state: active (0) [ns_server:debug,2014-08-19T16:54:59.887,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",592,active,0} [ns_server:debug,2014-08-19T16:54:59.888,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,984,952,1016,756,724,692,660,628,596,990,958,1022,762, 730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678,646, 614,976,944,1008,748,716,684,652,620,998,982,966,950,1014,754,738,722,706, 690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712,696, 680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670,638,606, 968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948,1012, 752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992,960,764,732, 700,668,636,604] [ns_server:debug,2014-08-19T16:54:59.954,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 413. Nacking mccouch update. [views:debug,2014-08-19T16:54:59.954,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/413. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:54:59.954,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",413,replica,0} [ns_server:debug,2014-08-19T16:54:59.955,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,711,647,583,519,1022,945,762,698,634,570, 1009,996,749,685,621,557,983,736,672,608,425,970,723,659,595,531,957,710,646, 582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607,543, 969,722,658,594,956,709,645,581,517,1020,943,760,696,632,568,1007,994,747, 683,619,555,981,734,670,606,423,968,721,657,593,529,955,708,644,580,1019,942, 759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592, 954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553,979, 732,668,604,421,966,719,655,591,527,953,706,642,578,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641,577, 513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,419, 964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989,742, 678,614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014,754, 690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962,715,651,587,523, 949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974,727, 663,599,535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560,986, 739,675,611,547,973,726,662,598,415,960,713,649,585,521,947,764,700,636,572, 1011,998,751,687,623,559,985,738,674,610,972,725,661,597,533,959,712,648,584, 1023,946,763,699,635,571,1010,997,686,558,737,609,971,660] [ns_server:debug,2014-08-19T16:55:00.038,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 396. Nacking mccouch update. [views:debug,2014-08-19T16:55:00.038,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/396. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:00.038,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",396,replica,0} [ns_server:debug,2014-08-19T16:55:00.039,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754,722, 690,658,626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994, 962,404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580, 548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566, 534,992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642, 610,578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978, 946,420,1010,718,654,590,526,952,1016,756,692,628,564,990,730,666,602,538, 964,406] [views:debug,2014-08-19T16:55:00.080,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/592. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:00.080,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",592,active,0} [views:debug,2014-08-19T16:55:00.113,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/413. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:00.113,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",413,replica,0} [views:debug,2014-08-19T16:55:00.147,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/396. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:00.147,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",396,replica,0} [ns_server:debug,2014-08-19T16:55:00.476,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 590. Nacking mccouch update. [views:debug,2014-08-19T16:55:00.476,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/590. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:00.476,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",590,active,0} [ns_server:debug,2014-08-19T16:55:00.476,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,976,944,1008,748,716,684,652,620,998,982,966,950,1014,754,738,722, 706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712, 696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670,638, 606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980,948, 1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992,960, 764,732,700,668,636,604] [ns_server:debug,2014-08-19T16:55:00.493,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 394. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:00.493,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 411. Nacking mccouch update. [views:debug,2014-08-19T16:55:00.493,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/394. Updated state: replica (0) [views:debug,2014-08-19T16:55:00.493,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/411. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:00.493,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",411,replica,0} [ns_server:debug,2014-08-19T16:55:00.493,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",394,replica,0} [ns_server:debug,2014-08-19T16:55:00.493,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754,722, 690,658,626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994, 962,404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580, 548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566, 534,992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642, 610,578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978, 946,420,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602, 538,964,406] [ns_server:debug,2014-08-19T16:55:00.494,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,711,647,583,519,1022,945,762,698,634,570, 1009,996,749,685,621,557,983,736,672,608,425,970,723,659,595,531,957,710,646, 582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607,543, 969,722,658,594,411,956,709,645,581,517,1020,943,760,696,632,568,1007,994, 747,683,619,555,981,734,670,606,423,968,721,657,593,529,955,708,644,580,1019, 942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656, 592,954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553, 979,732,668,604,421,966,719,655,591,527,953,706,642,578,1017,940,757,693,629, 565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705,641, 577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602, 419,964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575,1014, 754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962,715,651,587, 523,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974, 727,663,599,535,961,714,650,586,948,765,701,637,573,1012,999,752,688,624,560, 986,739,675,611,547,973,726,662,598,415,960,713,649,585,521,947,764,700,636, 572,1011,998,751,687,623,559,985,738,674,610,972,725,661,597,533,959,712,648, 584,1023,946,763,699,635,571,1010,997,686,558,737,609,971,660] [views:debug,2014-08-19T16:55:00.593,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/590. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:00.593,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",590,active,0} [views:debug,2014-08-19T16:55:00.627,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/411. 
Updated state: replica (0) [views:debug,2014-08-19T16:55:00.627,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/394. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:00.627,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",411,replica,0} [ns_server:debug,2014-08-19T16:55:00.627,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",394,replica,0} [ns_server:debug,2014-08-19T16:55:00.828,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 392. Nacking mccouch update. [views:debug,2014-08-19T16:55:00.828,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/392. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:00.828,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",392,replica,0} [ns_server:debug,2014-08-19T16:55:00.828,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,392,1014,754, 722,690,658,626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536, 994,962,404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612, 580,548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422, 1012,752,720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630, 598,566,534,992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706, 674,642,610,578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730, 666,602,538,964,406] [ns_server:debug,2014-08-19T16:55:00.844,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 409. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:00.844,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 588. Nacking mccouch update. [views:debug,2014-08-19T16:55:00.844,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/409. Updated state: replica (0) [views:debug,2014-08-19T16:55:00.845,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/588. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:00.845,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",409,replica,0} [ns_server:debug,2014-08-19T16:55:00.845,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",588,active,0} [ns_server:debug,2014-08-19T16:55:00.845,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,976,944,1008,748,716,684,652,620,588,998,982,966,950,1014,754,738, 722,706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,980, 948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992, 960,764,732,700,668,636,604] [ns_server:debug,2014-08-19T16:55:00.845,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,711,647,583,519,1022,945,762,698,634,570, 1009,996,749,685,621,557,983,736,672,608,425,970,723,659,595,531,957,710,646, 582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607,543, 969,722,658,594,411,956,709,645,581,517,1020,943,760,696,632,568,1007,994, 747,683,619,555,981,734,670,606,423,968,721,657,593,529,955,708,644,580,1019, 942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656, 592,409,954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617, 553,979,732,668,604,421,966,719,655,591,527,953,706,642,578,1017,940,757,693, 629,565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,952,705, 641,577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666, 602,419,964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002, 989,742,678,614,550,976,729,665,601,537,963,716,652,588,950,767,703,639,575, 1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962,715, 651,587,523,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612, 548,974,727,663,599,535,961,714,650,586,948,765,701,637,573,1012,999,752,688, 624,560,986,739,675,611,547,973,726,662,598,415,960,713,649,585,521,947,764, 700,636,572,1011,998,751,687,623,559,985,738,674,610,972,725,661,597,533,959, 712,648,584,1023,946,763,699,635,571,1010,997,686,558,737,609,971,660] [views:debug,2014-08-19T16:55:01.020,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/392. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:01.020,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",392,replica,0} [views:debug,2014-08-19T16:55:01.054,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/588. Updated state: active (0) [views:debug,2014-08-19T16:55:01.054,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/409. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:01.054,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",588,active,0} [ns_server:debug,2014-08-19T16:55:01.054,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",409,replica,0} [ns_server:debug,2014-08-19T16:55:01.249,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 390. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:01.249,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 586. Nacking mccouch update. [views:debug,2014-08-19T16:55:01.249,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/390. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:01.250,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",390,replica,0} [views:debug,2014-08-19T16:55:01.250,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/586. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:01.250,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",586,active,0} [ns_server:debug,2014-08-19T16:55:01.250,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,976,944,1008,748,716,684,652,620,588,998,982,966,950,1014,754,738, 722,706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,600,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 992,960,764,732,700,668,636,604] [ns_server:debug,2014-08-19T16:55:01.250,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,938,412,1002,742,710,678,646,614,582,550,518,976,944,418, 1008,748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658, 626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752, 720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534, 992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602,538, 964,406,704,640,576,512] [ns_server:debug,2014-08-19T16:55:01.266,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 407. Nacking mccouch update. [views:debug,2014-08-19T16:55:01.266,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/407. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:01.266,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",407,replica,0} [ns_server:debug,2014-08-19T16:55:01.267,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,711,647,583,519,1022,945,762,698,634,570, 1009,996,749,685,621,557,983,736,672,608,425,970,723,659,595,531,957,710,646, 582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607,543, 969,722,658,594,411,956,709,645,581,517,1020,943,760,696,632,568,1007,994, 747,683,619,555,981,734,670,606,423,968,721,657,593,529,955,708,644,580,1019, 942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656, 592,409,954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617, 553,979,732,668,604,421,966,719,655,591,527,953,706,642,578,1017,940,757,693, 629,565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952, 705,641,577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730, 666,602,419,964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,537,963,716,652,588,950,767,703,639, 575,1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962, 715,651,587,523,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676, 612,548,974,727,663,599,535,961,714,650,586,948,765,701,637,573,1012,999,752, 688,624,560,986,739,675,611,547,973,726,662,598,415,960,713,649,585,521,947, 764,700,636,572,1011,998,751,687,623,559,985,738,674,610,972,725,661,597,533, 959,712,648,584,1023,946,763,699,635,571,1010,997,686,558,737,609,971,660] [views:debug,2014-08-19T16:55:01.350,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/586. Updated state: active (0) [views:debug,2014-08-19T16:55:01.351,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/390. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:01.351,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",586,active,0} [ns_server:debug,2014-08-19T16:55:01.351,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",390,replica,0} [views:debug,2014-08-19T16:55:01.409,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/407. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:01.409,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",407,replica,0} [ns_server:debug,2014-08-19T16:55:01.651,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 584. Nacking mccouch update. [views:debug,2014-08-19T16:55:01.651,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/584. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:01.652,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",584,active,0} [ns_server:debug,2014-08-19T16:55:01.652,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738,722,706, 690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712,696, 680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734,702,670,638, 606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,586,980, 948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992, 960,764,732,700,668,636,604,998,966] [ns_server:debug,2014-08-19T16:55:01.668,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 388. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:01.668,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 405. Nacking mccouch update. [views:debug,2014-08-19T16:55:01.668,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/388. Updated state: replica (0) [views:debug,2014-08-19T16:55:01.669,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/405. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:01.669,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",388,replica,0} [ns_server:debug,2014-08-19T16:55:01.669,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",405,replica,0} [ns_server:debug,2014-08-19T16:55:01.669,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,938,412,1002,742,710,678,646,614,582,550,518,976,944,418, 1008,748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658, 626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752, 720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534, 992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602, 538,964,406,704,640,576,512] [ns_server:debug,2014-08-19T16:55:01.669,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,711,647,583,519,1022,945,762,698,634,570, 1009,996,749,685,621,557,983,736,672,608,425,970,723,659,595,531,957,710,646, 582,1021,944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607,543, 969,722,658,594,411,956,709,645,581,517,1020,943,760,696,632,568,1007,994, 
747,683,619,555,981,734,670,606,423,968,721,657,593,529,955,708,644,580,1019, 942,759,695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656, 592,409,954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617, 553,979,732,668,604,421,966,719,655,591,527,953,706,642,578,1017,940,757,693, 629,565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952, 705,641,577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730, 666,602,419,964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703, 639,575,1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417, 962,715,651,587,523,949,766,702,638,574,1013,753,689,625,561,1000,987,740, 676,612,548,974,727,663,599,535,961,714,650,586,948,765,701,637,573,1012,999, 752,688,624,560,986,739,675,611,547,973,726,662,598,415,960,713,649,585,521, 947,764,700,636,572,1011,998,751,687,623,559,985,738,674,610,972,725,661,597, 533,959,712,648,584,1023,946,763,699,635,571,1010,997,686,558,737,609,971, 660] [views:debug,2014-08-19T16:55:01.752,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/584. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:01.753,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",584,active,0} [views:debug,2014-08-19T16:55:01.779,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/405. Updated state: replica (0) [views:debug,2014-08-19T16:55:01.779,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/388. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:01.779,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",405,replica,0} [ns_server:debug,2014-08-19T16:55:01.780,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",388,replica,0} [ns_server:debug,2014-08-19T16:55:02.023,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 386. Nacking mccouch update. [views:debug,2014-08-19T16:55:02.023,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/386. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:02.023,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",386,replica,0} [ns_server:debug,2014-08-19T16:55:02.024,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,938,412,1002,742,710,678,646,614,582,550,518,976,944,418,386, 1008,748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658, 626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752, 720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534, 992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602, 538,964,406,704,640,576,512] [ns_server:debug,2014-08-19T16:55:02.140,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 582. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:02.140,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 403. Nacking mccouch update. [views:debug,2014-08-19T16:55:02.140,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/582. Updated state: active (0) [views:debug,2014-08-19T16:55:02.140,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/403. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:02.140,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",582,active,0} [ns_server:debug,2014-08-19T16:55:02.140,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",403,replica,0} [ns_server:debug,2014-08-19T16:55:02.140,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738,722, 706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712, 696,680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 992,960,764,732,700,668,636,604,998,966] [ns_server:debug,2014-08-19T16:55:02.141,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,945,762,698,634,570,1009,996,749, 685,621,557,983,736,672,608,425,970,723,659,595,531,957,710,646,582,1021,944, 761,697,633,569,1008,995,748,684,620,556,982,735,671,607,543,969,722,658,594, 411,956,709,645,581,517,1020,943,760,696,632,568,1007,994,747,683,619,555, 981,734,670,606,423,968,721,657,593,529,955,708,644,580,1019,942,759,695,631, 567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954,707, 643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732,668, 604,421,966,719,655,591,527,953,706,642,578,1017,940,757,693,629,565,1004, 991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641,577, 513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,419, 964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989,742, 678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014, 754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962,715,651,587, 523,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974, 727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999,752,688,624, 560,986,739,675,611,547,973,726,662,598,415,960,713,649,585,521,947,764,700, 636,572,1011,998,751,687,623,559,985,738,674,610,972,725,661,597,533,959,712, 648,584,1023,946,763,699,635,571,1010,997,686,558,737,609,971,660,711,583, 1022] [views:debug,2014-08-19T16:55:02.240,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/386. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:02.241,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",386,replica,0} [views:debug,2014-08-19T16:55:02.257,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/582. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:02.257,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",582,active,0} [views:debug,2014-08-19T16:55:02.257,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/403. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:02.258,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",403,replica,0} [ns_server:debug,2014-08-19T16:55:02.483,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 384. Nacking mccouch update. [views:debug,2014-08-19T16:55:02.483,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/384. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:02.484,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",384,replica,0} [ns_server:debug,2014-08-19T16:55:02.484,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,938,412,1002,742,710,678,646,614,582,550,518,976,944,418,386, 1008,748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658, 626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012, 752,720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566, 534,992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642, 610,578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978, 946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666, 602,538,964,406,704,640,576,512] [ns_server:debug,2014-08-19T16:55:02.535,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 401. Nacking mccouch update. [views:debug,2014-08-19T16:55:02.536,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/401. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:02.536,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",401,replica,0} [ns_server:debug,2014-08-19T16:55:02.536,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,945,762,698,634,570,1009,996,749, 685,621,557,983,736,672,608,425,970,723,659,595,531,957,710,646,582,1021,944, 761,697,633,569,1008,995,748,684,620,556,982,735,671,607,543,969,722,658,594, 411,956,709,645,581,517,1020,943,760,696,632,568,1007,994,747,683,619,555, 981,734,670,606,423,968,721,657,593,529,955,708,644,580,1019,942,759,695,631, 567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954,707, 643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732,668, 604,421,966,719,655,591,527,953,706,642,578,1017,940,757,693,629,565,1004, 991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641,577, 513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,419, 964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989,742, 678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014, 754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962,715,651,587, 523,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612,548,974, 727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999,752,688,624, 560,986,739,675,611,547,973,726,662,598,415,960,713,649,585,521,947,764,700, 636,572,1011,998,751,687,623,559,985,738,674,610,972,725,661,597,533,959,712, 648,584,401,1023,946,763,699,635,571,1010,997,686,558,737,609,971,660,711, 583,1022] [ns_server:debug,2014-08-19T16:55:02.552,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 580. Nacking mccouch update. [views:debug,2014-08-19T16:55:02.552,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/580. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:02.552,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",580,active,0} [ns_server:debug,2014-08-19T16:55:02.553,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738,722, 706,690,674,658,642,626,610,594,988,972,956,940,1020,1004,760,744,728,712, 696,680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734,702,670, 638,606,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618, 586,980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630, 598,992,960,764,732,700,668,636,604,998,966] [views:debug,2014-08-19T16:55:02.569,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/384. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:02.569,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",384,replica,0} [views:debug,2014-08-19T16:55:02.603,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/401. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:02.603,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",401,replica,0} [views:debug,2014-08-19T16:55:02.620,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/580. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:02.620,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",580,active,0} [ns_server:debug,2014-08-19T16:55:02.721,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 382. Nacking mccouch update. [views:debug,2014-08-19T16:55:02.721,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/382. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:02.721,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",382,replica,0} [ns_server:debug,2014-08-19T16:55:02.721,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,938,412,1002,742,710,678,646,614,582,550,518,976,944,418,386, 1008,748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658, 626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012, 752,720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566, 534,992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642, 610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730, 666,602,538,964,406,704,640,576,512] [ns_server:debug,2014-08-19T16:55:02.854,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 399. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:02.855,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 578. Nacking mccouch update. [views:debug,2014-08-19T16:55:02.855,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/399. Updated state: replica (0) [views:debug,2014-08-19T16:55:02.855,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/578. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:02.855,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",399,replica,0} [ns_server:debug,2014-08-19T16:55:02.855,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",578,active,0} [views:debug,2014-08-19T16:55:02.855,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/382. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:02.855,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,970,938,1002,742,710,678, 646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738,722, 706,690,674,658,642,626,610,594,578,988,972,956,940,1020,1004,760,744,728, 712,696,680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734,702, 670,638,606,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650, 618,586,980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662, 630,598,992,960,764,732,700,668,636,604,998,966] [ns_server:debug,2014-08-19T16:55:02.855,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",382,replica,0} [ns_server:debug,2014-08-19T16:55:02.856,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,945,762,698,634,570,1009,996,749, 685,621,557,983,736,672,608,425,970,723,659,595,531,957,710,646,582,399,1021, 944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607,543,969,722,658, 594,411,956,709,645,581,517,1020,943,760,696,632,568,1007,994,747,683,619, 555,981,734,670,606,423,968,721,657,593,529,955,708,644,580,1019,942,759,695, 631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954, 707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732, 668,604,421,966,719,655,591,527,953,706,642,578,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641, 577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602, 419,964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575, 1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962,715, 651,587,523,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612, 548,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999,752, 688,624,560,986,739,675,611,547,973,726,662,598,415,960,713,649,585,521,947, 764,700,636,572,1011,998,751,687,623,559,985,738,674,610,972,725,661,597,533, 959,712,648,584,401,1023,946,763,699,635,571,1010,997,686,558,737,609,971, 660,711,583,1022] [views:debug,2014-08-19T16:55:02.939,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/578. Updated state: active (0) [views:debug,2014-08-19T16:55:02.939,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/399. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:02.939,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",578,active,0} [ns_server:debug,2014-08-19T16:55:02.939,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",399,replica,0} [ns_server:debug,2014-08-19T16:55:03.084,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 380. Nacking mccouch update. [views:debug,2014-08-19T16:55:03.084,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/380. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:03.084,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",380,replica,0} [ns_server:debug,2014-08-19T16:55:03.085,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,412,742,710,678,646,614,582,550,518,976,944,418,386,1008,748, 716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562, 530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404,766,734,702, 670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720,688, 656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534,992,960, 402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610,578,546, 514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023,978,946,420, 388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602,538, 964,406,704,640,576,512,938,380,1002] [ns_server:debug,2014-08-19T16:55:03.192,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 576. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:03.192,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 397. Nacking mccouch update. [views:debug,2014-08-19T16:55:03.193,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/576. Updated state: active (0) [views:debug,2014-08-19T16:55:03.193,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/397. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:03.193,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",576,active,0} [ns_server:debug,2014-08-19T16:55:03.193,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",397,replica,0} [views:debug,2014-08-19T16:55:03.193,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/380. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:03.193,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",380,replica,0} [ns_server:debug,2014-08-19T16:55:03.193,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,576,970,938,1002,742,710, 678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,738, 722,706,690,674,658,642,626,610,594,578,988,972,956,940,1020,1004,760,744, 728,712,696,680,664,648,632,616,600,584,1023,994,978,962,946,1010,766,734, 702,670,638,606,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682, 650,618,586,980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694, 662,630,598,992,960,764,732,700,668,636,604,998,966] [ns_server:debug,2014-08-19T16:55:03.194,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,945,762,698,634,570,1009,996,749, 685,621,557,983,736,672,608,425,970,723,659,595,531,957,710,646,582,399,1021, 944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607,543,969,722,658, 594,411,956,709,645,581,517,1020,943,760,696,632,568,1007,994,747,683,619, 555,981,734,670,606,423,968,721,657,593,529,955,708,644,580,397,1019,942,759, 695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409, 954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553,979, 732,668,604,421,966,719,655,591,527,953,706,642,578,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641, 577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602, 419,964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575, 1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962,715, 651,587,523,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612, 548,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999,752, 688,624,560,986,739,675,611,547,973,726,662,598,415,960,713,649,585,521,947, 764,700,636,572,1011,998,751,687,623,559,985,738,674,610,972,725,661,597,533, 959,712,648,584,401,1023,946,763,699,635,571,1010,997,686,558,737,609,971, 660,711,583,1022] [views:debug,2014-08-19T16:55:03.318,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/576. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:03.319,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",576,active,0} [views:debug,2014-08-19T16:55:03.319,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/397. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:03.319,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",397,replica,0} [ns_server:debug,2014-08-19T16:55:03.571,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 378. Nacking mccouch update. 
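
[editor's note, illustrative only] Each cycle in this stretch of the log follows the same pattern per vbucket: mc_connection reports "Added _local/vbuuid document into vb: N. Nacking mccouch update.", capi_set_view_manager handles the corresponding "Got set_vbucket event for <bucket>/<vb>. Updated state: <state> (0)" message, mc_connection signals the matching {set_vbucket,...} mc_couch_event, and the bucket's "Usable vbuckets" list is reprinted. The stand-alone sketch below (an assumption, not part of Couchbase; the regex simply mirrors the message text visible in these entries) shows one way to condense such a log into a per-bucket state summary.

    #!/usr/bin/env python3
    # Illustrative helper (not shipped with Couchbase): summarise the
    # set_vbucket events in an ns_server debug log like the one above,
    # keeping the most recent state seen for each bucket/vbucket pair.
    import re
    import sys
    from collections import defaultdict

    # Mirrors the capi_set_view_manager message:
    #   "Got set_vbucket event for <bucket>/<vb>. Updated state: <state> (<n>)"
    EVENT_RE = re.compile(
        r"Got set_vbucket event for (\S+)/(\d+)\.\s*Updated state:\s*(\w+)"
    )

    def summarise(log_text):
        """Return {bucket: {vbucket: last_state}} for every event found."""
        states = defaultdict(dict)
        for bucket, vb, state in EVENT_RE.findall(log_text):
            states[bucket][int(vb)] = state   # later events overwrite earlier ones
        return states

    if __name__ == "__main__":
        text = sys.stdin.read()
        for bucket, vbs in sorted(summarise(text).items()):
            active = sum(1 for st in vbs.values() if st == "active")
            replica = sum(1 for st in vbs.values() if st == "replica")
            print(f"{bucket}: {active} active, {replica} replica, {len(vbs)} total")

Fed this section on stdin, a summary of that kind would report, for example, that "tiles" vbuckets seen here end up active while "maps_1_8_tiles" and "maps_1_8_metahash" vbuckets end up replica, which is the only state information these repeated entries carry.
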
[views:debug,2014-08-19T16:55:03.571,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/378. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:03.572,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",378,replica,0} [ns_server:debug,2014-08-19T16:55:03.572,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,412,742,710,678,646,614,582,550,518,976,944,418,386,1008,748, 716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562, 530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404,766,734,702, 670,638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942, 416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720, 688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534,992, 960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610,578, 546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602, 538,964,406,704,640,576,512,938,380,1002] [views:debug,2014-08-19T16:55:03.636,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/378. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:03.636,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",378,replica,0} [ns_server:debug,2014-08-19T16:55:03.736,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 395. Nacking mccouch update. [views:debug,2014-08-19T16:55:03.737,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/395. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:03.737,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",395,replica,0} [ns_server:debug,2014-08-19T16:55:03.737,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,945,762,698,634,570,1009,996,749, 685,621,557,983,736,672,608,425,970,723,659,595,531,957,710,646,582,399,1021, 944,761,697,633,569,1008,995,748,684,620,556,982,735,671,607,543,969,722,658, 594,411,956,709,645,581,517,1020,943,760,696,632,568,1007,994,747,683,619, 555,981,734,670,606,423,968,721,657,593,529,955,708,644,580,397,1019,942,759, 695,631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409, 954,707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553,979, 732,668,604,421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629, 565,1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705, 641,577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666, 602,419,964,717,653,589,525,951,704,640,576,1015,938,755,691,627,563,1002, 989,742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703,639, 575,1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962, 715,651,587,523,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676, 612,548,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999, 752,688,624,560,986,739,675,611,547,973,726,662,598,415,960,713,649,585,521, 947,764,700,636,572,1011,998,751,687,623,559,985,738,674,610,972,725,661,597, 533,959,712,648,584,401,1023,946,763,699,635,571,1010,997,686,558,737,609, 971,660,711,583,1022] [ns_server:debug,2014-08-19T16:55:03.768,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 574. Nacking mccouch update. [views:debug,2014-08-19T16:55:03.768,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/574. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:03.768,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",574,active,0} [ns_server:debug,2014-08-19T16:55:03.769,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,576,970,938,1002,742,710, 678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,722, 690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664,648, 632,616,600,584,1023,994,978,962,946,1010,766,734,702,670,638,606,574,968, 1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586,980,948, 1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992,960, 764,732,700,668,636,604,998,966,738,706,674,642,610,578] [views:debug,2014-08-19T16:55:03.871,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/395. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:03.871,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",395,replica,0} [views:debug,2014-08-19T16:55:03.890,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/574. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:03.890,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",574,active,0} [ns_server:debug,2014-08-19T16:55:03.923,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 376. Nacking mccouch update. [views:debug,2014-08-19T16:55:03.923,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/376. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:03.923,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",376,replica,0} [ns_server:debug,2014-08-19T16:55:03.924,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,412,742,710,678,646,614,582,550,518,976,944,418,386,1008,748, 716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562, 530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404,766,734,702, 670,638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942, 416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720, 688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534,992, 960,402,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642,610, 578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023,978, 946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666, 602,538,964,406,704,640,576,512,938,380,1002] [views:debug,2014-08-19T16:55:03.975,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/376. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:03.975,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",376,replica,0} [ns_server:debug,2014-08-19T16:55:04.051,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 572. Nacking mccouch update. [views:debug,2014-08-19T16:55:04.051,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/572. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:04.051,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",572,active,0} [ns_server:debug,2014-08-19T16:55:04.051,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,996,964,736,704,672,640,608,576,970,938,1002,742,710, 678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754,722, 690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664,648, 632,616,600,584,1023,994,978,962,946,1010,766,734,702,670,638,606,574,968, 1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586,980,948, 1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992,960, 764,732,700,668,636,604,572,998,966,738,706,674,642,610,578] [ns_server:debug,2014-08-19T16:55:04.051,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 393. Nacking mccouch update. [views:debug,2014-08-19T16:55:04.052,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/393. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:04.052,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",393,replica,0} [ns_server:debug,2014-08-19T16:55:04.052,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,1009,996,749,685,621,557, 983,736,672,608,425,970,723,659,595,531,957,710,646,582,399,1021,944,761,697, 633,569,1008,995,748,684,620,556,982,735,671,607,543,969,722,658,594,411,956, 709,645,581,517,1020,943,760,696,632,568,1007,994,747,683,619,555,981,734, 670,606,423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567, 1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954,707,643, 579,515,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732,668,604, 421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565,1004, 991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641,577, 513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,419, 964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575, 1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962,715, 651,587,523,949,766,702,638,574,1013,753,689,625,561,1000,987,740,676,612, 548,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999,752, 688,624,560,986,739,675,611,547,973,726,662,598,415,960,713,649,585,521,947, 764,700,636,572,1011,998,751,687,623,559,985,738,674,610,972,725,661,597,533, 959,712,648,584,401,1023,946,763,699,635,571,1010,997,686,558,737,609,971, 660,711,583,1022,945,762,634] [views:debug,2014-08-19T16:55:04.126,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/572. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:04.126,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",572,active,0} [views:debug,2014-08-19T16:55:04.143,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/393. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:04.143,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",393,replica,0} [ns_server:debug,2014-08-19T16:55:04.176,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 374. Nacking mccouch update. [views:debug,2014-08-19T16:55:04.177,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/374. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:04.177,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",374,replica,0} [ns_server:debug,2014-08-19T16:55:04.177,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,374, 736,672,608,544,970,412,742,710,678,646,614,582,550,518,976,944,418,386,1008, 748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594, 562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404,766,734, 702,670,638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974, 942,416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752, 720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534, 992,960,402,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642, 610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730, 666,602,538,964,406,704,640,576,512,938,380,1002] [views:debug,2014-08-19T16:55:04.228,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/374. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:04.228,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",374,replica,0} [ns_server:debug,2014-08-19T16:55:04.371,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 391. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:04.371,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 570. Nacking mccouch update. [views:debug,2014-08-19T16:55:04.371,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/391. Updated state: replica (0) [views:debug,2014-08-19T16:55:04.371,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/570. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:04.371,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",391,replica,0} [ns_server:debug,2014-08-19T16:55:04.371,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",570,active,0} [ns_server:debug,2014-08-19T16:55:04.372,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938,1002,742, 710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754, 722,690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664, 648,632,616,600,584,1023,994,978,962,946,1010,766,734,702,670,638,606,574, 968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586,980, 948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,992, 960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578] [ns_server:debug,2014-08-19T16:55:04.372,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,1009,996,749,685,621,557, 983,736,672,608,425,970,723,659,595,531,957,710,646,582,399,1021,944,761,697, 633,569,1008,995,748,684,620,556,982,735,671,607,543,969,722,658,594,411,956, 709,645,581,517,1020,943,760,696,632,568,1007,994,747,683,619,555,981,734, 670,606,423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567, 1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954,707,643, 579,515,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732,668,604, 421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565,1004, 991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641,577, 513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,419, 964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575, 1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962,715, 651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676, 612,548,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999, 752,688,624,560,986,739,675,611,547,973,726,662,598,415,960,713,649,585,521, 947,764,700,636,572,1011,998,751,687,623,559,985,738,674,610,972,725,661,597, 533,959,712,648,584,401,1023,946,763,699,635,571,1010,997,686,558,737,609, 971,660,711,583,1022,945,762,634] [ns_server:debug,2014-08-19T16:55:04.471,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 372. Nacking mccouch update. [views:debug,2014-08-19T16:55:04.471,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/372. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:04.471,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",372,replica,0} [views:debug,2014-08-19T16:55:04.471,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/391. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:04.472,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",391,replica,0} [views:debug,2014-08-19T16:55:04.472,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/570. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:04.472,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,374, 736,672,608,544,970,412,742,710,678,646,614,582,550,518,976,944,418,386,1008, 748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594, 562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404,372,766, 734,702,670,638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516, 974,942,416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012, 752,720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566, 534,992,960,402,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674, 642,610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990, 730,666,602,538,964,406,704,640,576,512,938,380,1002] [ns_server:debug,2014-08-19T16:55:04.472,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",570,active,0} [views:debug,2014-08-19T16:55:04.589,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/372. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:04.589,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",372,replica,0} [ns_server:debug,2014-08-19T16:55:04.789,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 389. Nacking mccouch update. [views:debug,2014-08-19T16:55:04.789,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/389. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:04.789,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",389,replica,0} [ns_server:debug,2014-08-19T16:55:04.790,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,1009,996,749,685,621,557, 983,736,672,608,425,970,723,659,595,531,957,710,646,582,399,1021,944,761,697, 633,569,1008,995,748,684,620,556,982,735,671,607,543,969,722,658,594,411,956, 709,645,581,517,1020,943,760,696,632,568,1007,994,747,683,619,555,981,734, 670,606,423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567, 1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954,707,643, 579,515,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732,668,604, 421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565,1004, 991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641,577, 513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602,419, 964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563,1002,989, 742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575, 1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962,715, 651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676, 612,548,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999, 752,688,624,560,986,739,675,611,547,973,726,662,598,415,960,713,649,585,521, 947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610,972,725,661, 597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997,686,558,737, 609,971,660,711,583,1022,945,762,634] [ns_server:debug,2014-08-19T16:55:04.882,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 568. Nacking mccouch update. [views:debug,2014-08-19T16:55:04.883,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/568. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:04.883,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",568,active,0} [ns_server:debug,2014-08-19T16:55:04.883,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938,1002,742, 710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754, 722,690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578] [ns_server:debug,2014-08-19T16:55:04.925,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 370. Nacking mccouch update. [views:debug,2014-08-19T16:55:04.925,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/370. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:04.925,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",370,replica,0} [ns_server:debug,2014-08-19T16:55:04.926,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,374, 736,672,608,544,970,412,710,646,582,518,976,944,418,386,1008,748,716,684,652, 620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562,530,988,956, 398,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638, 606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416,384, 1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720,688,656, 624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534,992,960,402, 370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642,610,578, 546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602, 538,964,406,704,640,576,512,938,380,1002,742,678,614,550] [views:debug,2014-08-19T16:55:04.966,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/389. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:04.967,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",389,replica,0} [views:debug,2014-08-19T16:55:05.050,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/568. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:05.050,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",568,active,0} [views:debug,2014-08-19T16:55:05.100,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/370. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:05.100,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",370,replica,0} [ns_server:debug,2014-08-19T16:55:05.203,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 387. Nacking mccouch update. [views:debug,2014-08-19T16:55:05.203,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/387. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:05.203,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",387,replica,0} [ns_server:debug,2014-08-19T16:55:05.204,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,996,749,685,621, 557,983,736,672,608,425,970,723,659,595,531,957,710,646,582,399,1021,944,761, 697,633,569,1008,995,748,684,620,556,982,735,671,607,543,969,722,658,594,411, 956,709,645,581,517,1020,943,760,696,632,568,1007,994,747,683,619,555,981, 734,670,606,423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631, 567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954,707, 643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732,668, 604,421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641, 577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602, 419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563,1002, 989,742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703,639, 575,1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962, 715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740, 676,612,548,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012, 999,752,688,624,560,986,739,675,611,547,973,726,662,598,415,960,713,649,585, 521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610,972,725, 661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997,686,558, 737,609,971,660,711,583,1022,945,762,634] [ns_server:debug,2014-08-19T16:55:05.262,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 566. Nacking mccouch update. [views:debug,2014-08-19T16:55:05.262,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/566. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:05.262,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",566,active,0} [views:debug,2014-08-19T16:55:05.262,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/387. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:05.262,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",387,replica,0} [ns_server:debug,2014-08-19T16:55:05.262,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,990,958,1022, 762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938,1002,742, 710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950,1014,754, 722,690,658,626,594,988,972,956,940,1020,1004,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578] [ns_server:debug,2014-08-19T16:55:05.279,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 368. Nacking mccouch update. [views:debug,2014-08-19T16:55:05.279,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/368. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:05.279,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",368,replica,0} [ns_server:debug,2014-08-19T16:55:05.279,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,374, 736,672,608,544,970,412,710,646,582,518,976,944,418,386,1008,748,716,684,652, 620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562,530,988,956, 398,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638, 606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416,384, 1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720,688,656, 624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534,992,960,402, 370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642,610,578, 546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,368,730,666, 602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550] [views:debug,2014-08-19T16:55:05.355,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/566. Updated state: active (0) [views:debug,2014-08-19T16:55:05.355,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/368. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:05.355,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",566,active,0} [ns_server:debug,2014-08-19T16:55:05.355,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",368,replica,0} [ns_server:debug,2014-08-19T16:55:05.438,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 385. Nacking mccouch update. 
[views:debug,2014-08-19T16:55:05.438,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/385. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:05.439,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",385,replica,0} [ns_server:debug,2014-08-19T16:55:05.440,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,996,749,685,621, 557,983,736,672,608,425,970,723,659,595,531,957,710,646,582,399,1021,944,761, 697,633,569,1008,995,748,684,620,556,982,735,671,607,543,969,722,658,594,411, 956,709,645,581,517,1020,943,760,696,632,568,385,1007,994,747,683,619,555, 981,734,670,606,423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695, 631,567,1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954, 707,643,579,515,1018,941,758,694,630,566,1005,992,745,681,617,553,979,732, 668,604,421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641, 577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602, 419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563,1002, 989,742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703,639, 575,1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962, 715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740, 676,612,548,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012, 999,752,688,624,560,986,739,675,611,547,973,726,662,598,415,960,713,649,585, 521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610,972,725, 661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997,686,558, 737,609,971,660,711,583,1022,945,762,634] [views:debug,2014-08-19T16:55:05.506,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/385. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:05.506,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",385,replica,0} [ns_server:debug,2014-08-19T16:55:05.522,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 366. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:05.522,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 564. Nacking mccouch update. [views:debug,2014-08-19T16:55:05.522,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/366. Updated state: replica (0) [views:debug,2014-08-19T16:55:05.523,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/564. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:05.523,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",366,replica,0} [ns_server:debug,2014-08-19T16:55:05.523,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",564,active,0} [ns_server:debug,2014-08-19T16:55:05.523,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,374, 736,672,608,544,970,412,710,646,582,518,976,944,418,386,1008,748,716,684,652, 620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720,688, 656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534,992,960, 402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642,610, 578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023,978, 946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,368,730, 666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550] [ns_server:debug,2014-08-19T16:55:05.523,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950, 1014,754,722,690,658,626,594,988,956,1020,760,744,728,712,696,680,664,648, 632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606,574, 968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586,980, 948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598,566, 992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578,972,940, 1004] [views:debug,2014-08-19T16:55:05.668,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/564. Updated state: active (0) [views:debug,2014-08-19T16:55:05.668,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/366. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:05.668,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",564,active,0} [ns_server:debug,2014-08-19T16:55:05.668,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",366,replica,0} [ns_server:debug,2014-08-19T16:55:05.843,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 383. Nacking mccouch update. [views:debug,2014-08-19T16:55:05.843,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/383. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:05.843,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",383,replica,0} [ns_server:debug,2014-08-19T16:55:05.844,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,736, 672,608,425,970,723,659,595,531,957,710,646,582,399,1021,944,761,697,633,569, 1008,995,748,684,620,556,982,735,671,607,543,969,722,658,594,411,956,709,645, 581,517,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670, 606,423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567, 1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954,707,643, 579,515,1018,941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668, 604,421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641, 577,513,1016,939,756,692,628,564,1003,990,743,679,615,551,977,730,666,602, 419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563,1002, 989,742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703,639, 575,1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417,962, 715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740, 676,612,548,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012, 999,752,688,624,560,986,739,675,611,547,973,726,662,598,415,960,713,649,585, 521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610,972,725, 661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997,686,558, 737,609,971,660,711,583,1022,945,762,634,996,685,557] [views:debug,2014-08-19T16:55:05.935,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/383. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:05.935,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",383,replica,0} [ns_server:debug,2014-08-19T16:55:06.052,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 364. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:06.052,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 562. Nacking mccouch update. [views:debug,2014-08-19T16:55:06.052,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/364. Updated state: replica (0) [views:debug,2014-08-19T16:55:06.052,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/562. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:06.052,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",562,active,0} [ns_server:debug,2014-08-19T16:55:06.053,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",364,replica,0} [ns_server:debug,2014-08-19T16:55:06.053,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950, 1014,754,722,690,658,626,594,562,988,956,1020,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,986,954,1018,758,726,694,662,630,598, 566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578,972, 940,1004] [ns_server:debug,2014-08-19T16:55:06.053,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,374, 736,672,608,544,970,412,710,646,582,518,976,944,418,386,1008,748,716,684,652, 620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720,688, 656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,992, 960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642, 610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,368, 730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550] [views:debug,2014-08-19T16:55:06.153,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/562. Updated state: active (0) [views:debug,2014-08-19T16:55:06.153,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/364. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:06.153,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",364,replica,0} [ns_server:debug,2014-08-19T16:55:06.153,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",562,active,0} [ns_server:debug,2014-08-19T16:55:06.270,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 381. Nacking mccouch update. [views:debug,2014-08-19T16:55:06.270,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/381. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:06.270,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",381,replica,0} [ns_server:debug,2014-08-19T16:55:06.271,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,736, 672,608,425,970,723,659,595,531,957,710,646,582,399,1021,944,761,697,633,569, 1008,995,748,684,620,556,982,735,671,607,543,969,722,658,594,411,956,709,645, 581,517,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670, 606,423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567, 1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954,707,643, 579,515,1018,941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668, 604,421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641, 577,513,1016,939,756,692,628,564,381,1003,990,743,679,615,551,977,730,666, 602,419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703, 639,575,1014,754,690,626,562,1001,988,741,677,613,549,975,728,664,600,417, 962,715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987, 740,676,612,548,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573, 1012,999,752,688,624,560,986,739,675,611,547,973,726,662,598,415,960,713,649, 585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610,972, 725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997,686, 558,737,609,971,660,711,583,1022,945,762,634,996,685,557] [views:debug,2014-08-19T16:55:06.320,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/381. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:06.321,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",381,replica,0} [ns_server:debug,2014-08-19T16:55:06.431,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 560. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:06.431,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 362. Nacking mccouch update. [views:debug,2014-08-19T16:55:06.431,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/560. Updated state: active (0) [views:debug,2014-08-19T16:55:06.431,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/362. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:06.431,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",560,active,0} [ns_server:debug,2014-08-19T16:55:06.431,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",362,replica,0} [ns_server:debug,2014-08-19T16:55:06.432,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950, 1014,754,722,690,658,626,594,562,988,956,1020,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630, 598,566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578, 972,940,1004] [ns_server:debug,2014-08-19T16:55:06.432,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,976,944,418,386,1008,748,716,684, 652,620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562,530,988, 956,398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702, 670,638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942, 416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674, 642,610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990, 368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550] [views:debug,2014-08-19T16:55:06.507,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/362. Updated state: replica (0) [views:debug,2014-08-19T16:55:06.507,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/560. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:06.507,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",362,replica,0} [ns_server:debug,2014-08-19T16:55:06.508,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",560,active,0} [ns_server:debug,2014-08-19T16:55:06.557,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 379. Nacking mccouch update. [views:debug,2014-08-19T16:55:06.557,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/379. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:06.558,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",379,replica,0} [ns_server:debug,2014-08-19T16:55:06.558,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,736, 672,608,425,970,723,659,595,531,957,710,646,582,399,1021,944,761,697,633,569, 1008,995,748,684,620,556,982,735,671,607,543,969,722,658,594,411,956,709,645, 581,517,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670, 606,423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567, 1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954,707,643, 579,515,1018,941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668, 604,421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641, 577,513,1016,939,756,692,628,564,381,1003,990,743,679,615,551,977,730,666, 602,419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703, 639,575,1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600, 417,962,715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000, 987,740,676,612,548,974,727,663,599,535,961,714,650,586,403,948,765,701,637, 573,1012,999,752,688,624,560,986,739,675,611,547,973,726,662,598,415,960,713, 649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610, 972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997, 686,558,737,609,971,660,711,583,1022,945,762,634,996,685,557] [views:debug,2014-08-19T16:55:06.610,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/379. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:06.610,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",379,replica,0} [ns_server:debug,2014-08-19T16:55:06.679,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 558. Nacking mccouch update. [views:debug,2014-08-19T16:55:06.679,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/558. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:06.679,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",558,active,0} [ns_server:debug,2014-08-19T16:55:06.679,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,982,950, 1014,754,722,690,658,626,594,562,988,956,1020,760,744,728,712,696,680,664, 648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638,606, 574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618,586, 980,948,1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630, 598,566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610,578, 972,940,1004] [ns_server:debug,2014-08-19T16:55:06.693,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 360. Nacking mccouch update. [views:debug,2014-08-19T16:55:06.693,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/360. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:06.694,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",360,replica,0} [ns_server:debug,2014-08-19T16:55:06.694,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,944,386,1008,748,716,684,652,620, 588,556,524,982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720,688, 656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,992, 960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642, 610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,368, 730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550,976,418] [views:debug,2014-08-19T16:55:06.744,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/558. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:06.744,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",558,active,0} [views:debug,2014-08-19T16:55:06.761,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/360. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:06.761,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",360,replica,0} [ns_server:debug,2014-08-19T16:55:06.786,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 377. Nacking mccouch update. 
[views:debug,2014-08-19T16:55:06.786,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/377. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:06.786,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",377,replica,0} [ns_server:debug,2014-08-19T16:55:06.787,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,736, 672,608,425,970,723,659,595,531,957,710,646,582,399,1021,944,761,697,633,569, 1008,995,748,684,620,556,982,735,671,607,543,969,722,658,594,411,956,709,645, 581,517,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670, 606,423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567, 1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954,707,643, 579,515,1018,941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668, 604,421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641, 577,513,1016,939,756,692,628,564,381,1003,990,743,679,615,551,977,730,666, 602,419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703, 639,575,1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600, 417,962,715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000, 987,740,676,612,548,974,727,663,599,535,961,714,650,586,403,948,765,701,637, 573,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,960, 713,649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674, 610,972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010, 997,686,558,737,609,971,660,711,583,1022,945,762,634,996,685,557] [views:debug,2014-08-19T16:55:06.913,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/377. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:06.913,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",377,replica,0} [ns_server:debug,2014-08-19T16:55:07.072,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 358. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:07.072,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 556. Nacking mccouch update. [views:debug,2014-08-19T16:55:07.072,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/358. Updated state: replica (0) [views:debug,2014-08-19T16:55:07.072,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/556. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:07.072,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",358,replica,0} [ns_server:debug,2014-08-19T16:55:07.072,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",556,active,0} [ns_server:debug,2014-08-19T16:55:07.072,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,556,982, 950,1014,754,722,690,658,626,594,562,988,956,1020,760,744,728,712,696,680, 664,648,632,616,600,584,568,1023,994,978,962,946,1010,766,734,702,670,638, 606,574,968,1000,740,708,676,644,612,580,974,942,1006,746,714,682,650,618, 586,980,948,1012,752,720,688,656,624,592,560,986,954,1018,758,726,694,662, 630,598,566,992,960,764,732,700,668,636,604,572,998,966,738,706,674,642,610, 578,972,940,1004] [ns_server:debug,2014-08-19T16:55:07.072,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,944,386,1008,748,716,684,652,620, 588,556,524,982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674, 642,610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990, 368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550,976, 418] [views:debug,2014-08-19T16:55:07.214,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/556. Updated state: active (0) [views:debug,2014-08-19T16:55:07.214,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/358. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:07.214,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",556,active,0} [ns_server:debug,2014-08-19T16:55:07.214,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",358,replica,0} [ns_server:debug,2014-08-19T16:55:07.306,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 375. Nacking mccouch update. [views:debug,2014-08-19T16:55:07.306,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/375. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:07.306,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",375,replica,0} [ns_server:debug,2014-08-19T16:55:07.307,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,736, 672,608,425,970,723,659,595,531,957,710,646,582,399,1021,944,761,697,633,569, 1008,995,748,684,620,556,982,735,671,607,543,969,722,658,594,411,956,709,645, 581,517,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670, 606,423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567, 1006,993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954,707,643, 579,515,1018,941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668, 604,421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641, 577,513,1016,939,756,692,628,564,381,1003,990,743,679,615,551,977,730,666, 602,419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703, 639,575,1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600, 417,962,715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000, 987,740,676,612,548,974,727,663,599,535,961,714,650,586,403,948,765,701,637, 573,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,960, 713,649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674, 610,972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010, 997,686,558,375,737,609,971,660,711,583,1022,945,762,634,996,685,557] [views:debug,2014-08-19T16:55:07.415,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/375. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:07.415,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",375,replica,0} [ns_server:debug,2014-08-19T16:55:07.574,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 554. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:07.574,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 356. Nacking mccouch update. [views:debug,2014-08-19T16:55:07.574,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/554. Updated state: active (0) [views:debug,2014-08-19T16:55:07.574,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/356. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:07.574,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",554,active,0} [ns_server:debug,2014-08-19T16:55:07.574,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",356,replica,0} [ns_server:debug,2014-08-19T16:55:07.574,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,556,982, 950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632,600, 568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708,676, 644,612,580,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720, 688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732, 700,668,636,604,572,998,966,738,706,674,642,610,578,972,940,1004,744,712,680, 648,616,584,1023] [ns_server:debug,2014-08-19T16:55:07.575,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,944,386,1008,748,716,684,652,620, 588,556,524,982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674, 642,610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550, 976,418] [views:debug,2014-08-19T16:55:07.676,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/356. Updated state: replica (0) [views:debug,2014-08-19T16:55:07.676,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/554. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:07.676,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",356,replica,0} [ns_server:debug,2014-08-19T16:55:07.676,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",554,active,0} [ns_server:debug,2014-08-19T16:55:07.753,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 373. Nacking mccouch update. [views:debug,2014-08-19T16:55:07.753,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/373. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:07.753,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",373,replica,0} [ns_server:debug,2014-08-19T16:55:07.754,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 970,723,659,595,531,957,710,646,582,399,1021,944,761,697,633,569,1008,995, 748,684,620,556,373,982,735,671,607,543,969,722,658,594,411,956,709,645,581, 517,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670,606, 423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567,1006, 993,746,682,618,554,980,733,669,605,541,967,720,656,592,409,954,707,643,579, 515,1018,941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668,604, 421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565,1004, 991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641,577, 513,1016,939,756,692,628,564,381,1003,990,743,679,615,551,977,730,666,602, 419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563,1002, 989,742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703,639, 575,1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417, 962,715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987, 740,676,612,548,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573, 1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,960,713, 649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610, 972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997, 686,558,375,737,609,971,660,711,583,1022,945,762,634,996,685,557,736,608,425] [views:debug,2014-08-19T16:55:07.804,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/373. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:07.804,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",373,replica,0} [ns_server:debug,2014-08-19T16:55:07.904,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 552. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:07.904,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 354. Nacking mccouch update. [views:debug,2014-08-19T16:55:07.904,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/552. Updated state: active (0) [views:debug,2014-08-19T16:55:07.904,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/354. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:07.904,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",552,active,0} [ns_server:debug,2014-08-19T16:55:07.905,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",354,replica,0} [ns_server:debug,2014-08-19T16:55:07.905,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,976,944,1008,748,716,684,652,620,588,556,982, 950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632,600, 568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708,676, 644,612,580,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720, 688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732, 700,668,636,604,572,998,966,738,706,674,642,610,578,972,940,1004,744,712,680, 648,616,584,552,1023] [ns_server:debug,2014-08-19T16:55:07.905,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,944,386,1008,748,716,684,652,620, 588,556,524,982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674, 642,610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550, 976,418,354] [views:debug,2014-08-19T16:55:07.963,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/552. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:07.963,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",552,active,0} [views:debug,2014-08-19T16:55:07.971,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/354. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:07.971,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",354,replica,0} [ns_server:debug,2014-08-19T16:55:08.063,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 371. Nacking mccouch update. [views:debug,2014-08-19T16:55:08.064,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/371. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:08.064,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",371,replica,0} [ns_server:debug,2014-08-19T16:55:08.064,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 970,723,659,595,531,957,710,646,582,399,1021,944,761,697,633,569,1008,995, 748,684,620,556,373,982,735,671,607,543,969,722,658,594,411,956,709,645,581, 517,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670,606, 423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567,1006, 993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409,954,707,643, 579,515,1018,941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668, 604,421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565, 1004,991,744,680,616,552,978,731,667,603,539,965,718,654,590,407,952,705,641, 577,513,1016,939,756,692,628,564,381,1003,990,743,679,615,551,977,730,666, 602,419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703, 639,575,1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600, 417,962,715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000, 987,740,676,612,548,974,727,663,599,535,961,714,650,586,403,948,765,701,637, 573,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,960, 713,649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674, 610,972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010, 997,686,558,375,737,609,971,660,711,583,1022,945,762,634,996,685,557,736,608, 425] [views:debug,2014-08-19T16:55:08.115,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/371. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:08.115,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",371,replica,0} [ns_server:debug,2014-08-19T16:55:08.248,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 550. Nacking mccouch update. [views:debug,2014-08-19T16:55:08.248,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/550. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:08.248,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",550,active,0} [ns_server:debug,2014-08-19T16:55:08.248,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708, 676,644,612,580,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752, 720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764, 732,700,668,636,604,572,998,966,738,706,674,642,610,578,972,940,1004,744,712, 680,648,616,584,552,1023] [ns_server:debug,2014-08-19T16:55:08.292,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 352. Nacking mccouch update. [views:debug,2014-08-19T16:55:08.292,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/352. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:08.292,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",352,replica,0} [ns_server:debug,2014-08-19T16:55:08.292,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,944,386,1008,748,716,684,652,620, 588,556,524,982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,352,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752, 720,688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566, 534,992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706, 674,642,610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550, 976,418,354] [views:debug,2014-08-19T16:55:08.410,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/550. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:08.410,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",550,active,0} [views:debug,2014-08-19T16:55:08.451,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/352. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:08.451,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",352,replica,0} [ns_server:debug,2014-08-19T16:55:08.484,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 369. Nacking mccouch update. 
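Each "Usable vbuckets" dump from capi_set_view_manager:handle_info:387 is an unordered set, and in this window each new dump for a bucket appears to add exactly the vbucket named in the preceding set_vbucket event. A small sketch for checking that against two snapshots copied out of the log follows; parse_usable_vbuckets and added_vbuckets are assumed helper names, not ns_server functions.

    # Sketch (assumed helpers): diff two "Usable vbuckets: [...]" dumps.
    def parse_usable_vbuckets(dump):
        """Parse one 'Usable vbuckets: [..]' dump into a set of vbucket ids."""
        inner = dump.split("[", 1)[1].rsplit("]", 1)[0]
        return {int(tok) for tok in inner.split(",") if tok.strip()}

    def added_vbuckets(earlier_dump, later_dump):
        """Vbucket ids present in the later snapshot but missing from the earlier one."""
        return parse_usable_vbuckets(later_dump) - parse_usable_vbuckets(earlier_dump)

For instance, for the two maps_1_8_tiles dumps on either side of the set_vbucket event for vbucket 358, added_vbuckets would be expected to return {358}.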
[views:debug,2014-08-19T16:55:08.484,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/369. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:08.484,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",369,replica,0} [ns_server:debug,2014-08-19T16:55:08.485,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 970,723,659,595,531,957,710,646,582,399,1021,944,761,697,633,569,1008,995, 748,684,620,556,373,982,735,671,607,543,969,722,658,594,411,956,709,645,581, 517,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670,606, 423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567,1006, 993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409,954,707,643, 579,515,1018,941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668, 604,421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565, 1004,991,744,680,616,552,369,978,731,667,603,539,965,718,654,590,407,952,705, 641,577,513,1016,939,756,692,628,564,381,1003,990,743,679,615,551,977,730, 666,602,419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563, 1002,989,742,678,614,550,976,729,665,601,537,963,716,652,588,405,950,767,703, 639,575,1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600, 417,962,715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000, 987,740,676,612,548,974,727,663,599,535,961,714,650,586,403,948,765,701,637, 573,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,960, 713,649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674, 610,972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010, 997,686,558,375,737,609,971,660,711,583,1022,945,762,634,996,685,557,736,608, 425] [views:debug,2014-08-19T16:55:08.579,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/369. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:08.579,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",369,replica,0} [ns_server:debug,2014-08-19T16:55:08.744,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 350. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:08.744,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 548. Nacking mccouch update. [views:debug,2014-08-19T16:55:08.744,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/350. Updated state: replica (0) [views:debug,2014-08-19T16:55:08.744,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/548. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:08.744,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",350,replica,0} [ns_server:debug,2014-08-19T16:55:08.744,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",548,active,0} [ns_server:debug,2014-08-19T16:55:08.745,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708, 676,644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012, 752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960, 764,732,700,668,636,604,572,998,966,738,706,674,642,610,578,972,940,1004,744, 712,680,648,616,584,552,1023] [ns_server:debug,2014-08-19T16:55:08.745,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,944,386,1008,748,684,620,556,982, 950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956,398,366,1020, 760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638,606,574, 542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416,384,352, 1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688, 656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,992, 960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642, 610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550, 976,418,354,716,652,588,524] [views:debug,2014-08-19T16:55:08.836,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/548. Updated state: active (0) [views:debug,2014-08-19T16:55:08.836,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/350. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:08.836,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",548,active,0} [ns_server:debug,2014-08-19T16:55:08.836,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",350,replica,0} [ns_server:debug,2014-08-19T16:55:08.928,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 367. Nacking mccouch update. [views:debug,2014-08-19T16:55:08.928,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/367. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:08.928,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",367,replica,0} [ns_server:debug,2014-08-19T16:55:08.929,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 970,723,659,595,531,957,710,646,582,399,1021,944,761,697,633,569,1008,995, 748,684,620,556,373,982,735,671,607,543,969,722,658,594,411,956,709,645,581, 517,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670,606, 423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567,1006, 993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409,954,707,643, 579,515,1018,941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668, 604,421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565, 1004,991,744,680,616,552,369,978,731,667,603,539,965,718,654,590,407,952,705, 641,577,513,1016,939,756,692,628,564,381,1003,990,743,679,615,551,977,730, 666,602,419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563, 1002,989,742,678,614,550,367,976,729,665,601,537,963,716,652,588,405,950,767, 703,639,575,1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664, 600,417,962,715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561, 1000,987,740,676,612,548,974,727,663,599,535,961,714,650,586,403,948,765,701, 637,573,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415, 960,713,649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738, 674,610,972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571, 1010,997,686,558,375,737,609,971,660,711,583,1022,945,762,634,996,685,557, 736,608,425] [views:debug,2014-08-19T16:55:09.029,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/367. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:09.029,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",367,replica,0} [ns_server:debug,2014-08-19T16:55:09.156,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 546. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:09.157,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 348. Nacking mccouch update. [views:debug,2014-08-19T16:55:09.157,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/546. Updated state: active (0) [views:debug,2014-08-19T16:55:09.157,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/348. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:09.157,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",546,active,0} [ns_server:debug,2014-08-19T16:55:09.157,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",348,replica,0} [ns_server:debug,2014-08-19T16:55:09.157,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,978,962,946,1010,766,734,702,670,638,606,574,968,1000,740,708, 676,644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012, 752,720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960, 764,732,700,668,636,604,572,998,966,738,706,674,642,610,578,546,972,940,1004, 744,712,680,648,616,584,552,1023] [ns_server:debug,2014-08-19T16:55:09.157,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620,556, 982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956,398,366, 1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638,606, 574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416,384,352, 1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688, 656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,992, 960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642, 610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550, 976,418,354,716,652,588,524] [views:debug,2014-08-19T16:55:09.241,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/348. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:09.241,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",348,replica,0} [views:debug,2014-08-19T16:55:09.241,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/546. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:09.241,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",546,active,0} [ns_server:debug,2014-08-19T16:55:09.337,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 365. Nacking mccouch update. [views:debug,2014-08-19T16:55:09.337,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/365. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:09.338,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",365,replica,0} [ns_server:debug,2014-08-19T16:55:09.338,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 970,723,659,595,531,957,710,646,582,399,1021,944,761,697,633,569,1008,995, 748,684,620,556,373,982,735,671,607,543,969,722,658,594,411,956,709,645,581, 517,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670,606, 423,968,721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567,1006, 993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409,954,707,643, 579,515,1018,941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668, 604,421,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565, 1004,991,744,680,616,552,369,978,731,667,603,539,965,718,654,590,407,952,705, 641,577,513,1016,939,756,692,628,564,381,1003,990,743,679,615,551,977,730, 666,602,419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563, 1002,989,742,678,614,550,367,976,729,665,601,537,963,716,652,588,405,950,767, 703,639,575,1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664, 600,417,962,715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561, 1000,987,740,676,612,548,365,974,727,663,599,535,961,714,650,586,403,948,765, 701,637,573,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598, 415,960,713,649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985, 738,674,610,972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571, 1010,997,686,558,375,737,609,971,660,711,583,1022,945,762,634,996,685,557, 736,608,425] [views:debug,2014-08-19T16:55:09.408,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/365. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:09.408,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",365,replica,0} [ns_server:debug,2014-08-19T16:55:09.517,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 544. Nacking mccouch update. [views:debug,2014-08-19T16:55:09.517,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/544. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:09.517,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",544,active,0} [ns_server:debug,2014-08-19T16:55:09.518,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,962,766,734,702,670,638,606,574,968,1000,740,708,676,644,612,580, 548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720,688,656, 624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732,700,668, 636,604,572,998,966,738,706,674,642,610,578,546,972,940,1004,744,712,680,648, 616,584,552,1023,978,946,1010] [ns_server:debug,2014-08-19T16:55:09.629,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 346. Nacking mccouch update. [views:debug,2014-08-19T16:55:09.629,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/346. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:09.629,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",346,replica,0} [ns_server:debug,2014-08-19T16:55:09.630,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620,556, 982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956,398,366, 1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638,606, 574,542,968,410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384, 352,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674, 642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550, 976,418,354,716,652,588,524] [ns_server:debug,2014-08-19T16:55:09.694,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 363. Nacking mccouch update. [views:debug,2014-08-19T16:55:09.694,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/363. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:09.694,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",363,replica,0} [views:debug,2014-08-19T16:55:09.694,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/544. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:09.694,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",544,active,0} [ns_server:debug,2014-08-19T16:55:09.695,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 723,595,957,710,646,582,399,1021,944,761,697,633,569,1008,995,748,684,620, 556,373,982,735,671,607,543,969,722,658,594,411,956,709,645,581,517,1020,943, 760,696,632,568,385,1007,994,747,683,619,555,981,734,670,606,423,968,721,657, 593,529,955,708,644,580,397,1019,942,759,695,631,567,1006,993,746,682,618, 554,371,980,733,669,605,541,967,720,656,592,409,954,707,643,579,515,1018,941, 758,694,630,566,383,1005,992,745,681,617,553,979,732,668,604,421,966,719,655, 591,527,953,706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616, 552,369,978,731,667,603,539,965,718,654,590,407,952,705,641,577,513,1016,939, 756,692,628,564,381,1003,990,743,679,615,551,977,730,666,602,419,964,717,653, 589,525,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,962,715,651,587, 523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612,548, 365,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999,752, 688,624,560,377,986,739,675,611,547,973,726,662,598,415,960,713,649,585,521, 947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610,363,972,725, 661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997,686,558, 375,737,609,971,660,711,583,1022,945,762,634,996,685,557,736,608,425,970,659, 531] [views:debug,2014-08-19T16:55:09.711,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/346. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:09.711,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",346,replica,0} [views:debug,2014-08-19T16:55:09.821,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/363. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:09.821,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",363,replica,0} [ns_server:debug,2014-08-19T16:55:09.946,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 344. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:09.946,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 542. Nacking mccouch update. [views:debug,2014-08-19T16:55:09.946,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/344. Updated state: replica (0) [views:debug,2014-08-19T16:55:09.947,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/542. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:09.947,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",344,replica,0} [ns_server:debug,2014-08-19T16:55:09.947,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",542,active,0} [ns_server:debug,2014-08-19T16:55:09.947,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720,688, 656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732,700, 668,636,604,572,998,966,738,706,674,642,610,578,546,972,940,1004,744,712,680, 648,616,584,552,1023,978,946,1010] [ns_server:debug,2014-08-19T16:55:09.947,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620,556, 982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956,398,366, 1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638,606, 574,542,968,410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384, 352,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706, 674,642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552, 520,1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614, 550,976,418,354,716,652,588,524] [ns_server:debug,2014-08-19T16:55:10.038,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 361. Nacking mccouch update. [views:debug,2014-08-19T16:55:10.038,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/361. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:10.039,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",361,replica,0} [views:debug,2014-08-19T16:55:10.039,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/344. Updated state: replica (0) [views:debug,2014-08-19T16:55:10.039,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/542. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:10.039,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",344,replica,0} [ns_server:debug,2014-08-19T16:55:10.040,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",542,active,0} [ns_server:debug,2014-08-19T16:55:10.039,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,710,646,582,399,1021,944,761,697,633,569,1008,995,748,684, 620,556,373,982,735,671,607,543,969,722,658,594,411,956,709,645,581,517,1020, 943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670,606,423,968,721, 657,593,529,955,708,644,580,397,1019,942,759,695,631,567,1006,993,746,682, 618,554,371,980,733,669,605,541,967,720,656,592,409,954,707,643,579,515,1018, 941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668,604,421,966,719, 655,591,527,953,706,642,578,395,1017,940,757,693,629,565,1004,991,744,680, 616,552,369,978,731,667,603,539,965,718,654,590,407,952,705,641,577,513,1016, 939,756,692,628,564,381,1003,990,743,679,615,551,977,730,666,602,419,964,717, 653,589,525,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678, 614,550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014, 754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,962,715,651, 587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612, 548,365,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999, 752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,960,713,649,585, 521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610,363,972, 725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997,686, 558,375,737,609,971,660,711,583,1022,945,762,634,996,685,557,736,608,425,970, 659,531] [views:debug,2014-08-19T16:55:10.139,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/361. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:10.140,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",361,replica,0} [ns_server:debug,2014-08-19T16:55:10.298,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 540. Nacking mccouch update. [views:debug,2014-08-19T16:55:10.298,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/540. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:10.298,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",540,active,0} [ns_server:debug,2014-08-19T16:55:10.299,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720,688, 656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732,700, 668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940,1004,744,712, 680,648,616,584,552,1023,978,946,1010] [ns_server:debug,2014-08-19T16:55:10.315,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 342. Nacking mccouch update. [views:debug,2014-08-19T16:55:10.315,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/342. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:10.315,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",342,replica,0} [ns_server:debug,2014-08-19T16:55:10.316,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620,556, 982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956,398,366, 1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638,606, 574,542,968,410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384, 352,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706, 674,642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552, 520,1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678, 614,550,976,418,354,716,652,588,524] [ns_server:debug,2014-08-19T16:55:10.365,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 359. Nacking mccouch update. [views:debug,2014-08-19T16:55:10.365,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/359. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:10.365,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",359,replica,0} [ns_server:debug,2014-08-19T16:55:10.366,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,710,646,582,399,1021,944,761,697,633,569,1008,995,748,684, 620,556,373,982,735,671,607,543,969,722,658,594,411,956,709,645,581,517,1020, 943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670,606,423,359,968, 721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567,1006,993,746, 682,618,554,371,980,733,669,605,541,967,720,656,592,409,954,707,643,579,515, 1018,941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668,604,421, 966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565,1004,991, 744,680,616,552,369,978,731,667,603,539,965,718,654,590,407,952,705,641,577, 513,1016,939,756,692,628,564,381,1003,990,743,679,615,551,977,730,666,602, 419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563,1002, 989,742,678,614,550,367,976,729,665,601,537,963,716,652,588,405,950,767,703, 639,575,1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600, 417,962,715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000, 987,740,676,612,548,365,974,727,663,599,535,961,714,650,586,403,948,765,701, 637,573,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415, 960,713,649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738, 674,610,363,972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571, 1010,997,686,558,375,737,609,971,660,711,583,1022,945,762,634,996,685,557, 736,608,425,970,659,531] [views:debug,2014-08-19T16:55:10.436,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/342. Updated state: replica (0) [views:debug,2014-08-19T16:55:10.436,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/540. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:10.436,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",342,replica,0} [ns_server:debug,2014-08-19T16:55:10.437,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",540,active,0} [views:debug,2014-08-19T16:55:10.478,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/359. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:10.478,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",359,replica,0} [ns_server:debug,2014-08-19T16:55:10.671,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 538. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:10.671,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 170. Nacking mccouch update. [views:debug,2014-08-19T16:55:10.671,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/538. 
Updated state: active (0) [views:debug,2014-08-19T16:55:10.671,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/170. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:10.671,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",538,active,0} [ns_server:debug,2014-08-19T16:55:10.671,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",170,replica,0} [ns_server:debug,2014-08-19T16:55:10.671,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970, 938,1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588, 556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664, 632,600,568,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644, 612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720, 688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764,732, 700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940,1004,744, 712,680,648,616,584,552,1023,978,946,1010] [ns_server:debug,2014-08-19T16:55:10.672,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620,556, 982,424,360,754,722,690,658,626,594,562,530,988,956,398,366,1020,760,728,696, 664,632,600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968, 410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006, 746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688,656,624, 592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,992,960,402, 370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642,610, 578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564,990, 368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678,614,550, 976,418,354,716,652,588,524,950,392,1014] [ns_server:debug,2014-08-19T16:55:10.688,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 357. Nacking mccouch update. [views:debug,2014-08-19T16:55:10.688,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/357. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:10.688,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",357,replica,0} [ns_server:debug,2014-08-19T16:55:10.689,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,710,646,582,399,1021,944,761,697,633,569,1008,995,748,684, 620,556,373,982,735,671,607,543,969,722,658,594,411,956,709,645,581,517,1020, 943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670,606,423,359,968, 721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567,1006,993,746, 682,618,554,371,980,733,669,605,541,967,720,656,592,409,954,707,643,579,515, 1018,941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668,604,421, 357,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565,1004, 991,744,680,616,552,369,978,731,667,603,539,965,718,654,590,407,952,705,641, 577,513,1016,939,756,692,628,564,381,1003,990,743,679,615,551,977,730,666, 602,419,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563, 1002,989,742,678,614,550,367,976,729,665,601,537,963,716,652,588,405,950,767, 703,639,575,1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664, 600,417,962,715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561, 1000,987,740,676,612,548,365,974,727,663,599,535,961,714,650,586,403,948,765, 701,637,573,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598, 415,960,713,649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985, 738,674,610,363,972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635, 571,1010,997,686,558,375,737,609,971,660,711,583,1022,945,762,634,996,685, 557,736,608,425,970,659,531] [views:debug,2014-08-19T16:55:10.797,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/538. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:10.797,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",538,active,0} [views:debug,2014-08-19T16:55:10.797,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/170. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:10.797,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",170,replica,0} [views:debug,2014-08-19T16:55:10.838,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/357. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:10.839,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",357,replica,0} [ns_server:debug,2014-08-19T16:55:11.007,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 168. Nacking mccouch update. [views:debug,2014-08-19T16:55:11.007,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/168. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:11.007,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",168,replica,0} [ns_server:debug,2014-08-19T16:55:11.008,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620,556, 982,424,360,754,722,690,658,626,594,562,530,988,956,398,366,1020,760,728,696, 664,632,600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968, 410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006, 746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688,656,624, 592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960, 402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642, 610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678,614, 550,976,418,354,716,652,588,524,950,392,1014] [ns_server:debug,2014-08-19T16:55:11.041,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 536. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:11.041,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 355. Nacking mccouch update. [views:debug,2014-08-19T16:55:11.041,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/536. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:11.041,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",536,active,0} [views:debug,2014-08-19T16:55:11.041,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/355. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:11.041,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",355,replica,0} [ns_server:debug,2014-08-19T16:55:11.041,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,686,654,622,590,558,984,952,1016,756,724,692,660,628,596,564,990,958, 1022,762,730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970, 938,1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588, 556,982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664, 632,600,568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676, 644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752, 720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,992,960,764, 732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940,1004, 744,712,680,648,616,584,552,1023,978,946,1010] [ns_server:debug,2014-08-19T16:55:11.042,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,710,646,582,399,1021,944,761,697,633,569,1008,995,748,684, 620,556,373,982,735,671,607,543,969,722,658,594,411,956,709,645,581,517,1020, 943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670,606,423,359,968, 721,657,593,529,955,708,644,580,397,1019,942,759,695,631,567,1006,993,746, 682,618,554,371,980,733,669,605,541,967,720,656,592,409,954,707,643,579,515, 1018,941,758,694,630,566,383,1005,992,745,681,617,553,979,732,668,604,421, 357,966,719,655,591,527,953,706,642,578,395,1017,940,757,693,629,565,1004, 991,744,680,616,552,369,978,731,667,603,539,965,718,654,590,407,952,705,641, 577,513,1016,939,756,692,628,564,381,1003,990,743,679,615,551,977,730,666, 602,419,355,964,717,653,589,525,951,704,640,576,393,1015,938,755,691,627,563, 1002,989,742,678,614,550,367,976,729,665,601,537,963,716,652,588,405,950,767, 703,639,575,1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664, 600,417,962,715,651,587,523,949,766,702,638,574,391,1013,753,689,625,561, 1000,987,740,676,612,548,365,974,727,663,599,535,961,714,650,586,403,948,765, 701,637,573,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598, 415,960,713,649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985, 738,674,610,363,972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635, 571,1010,997,686,558,375,737,609,971,660,711,583,1022,945,762,634,996,685, 557,736,608,425,970,659,531] [views:debug,2014-08-19T16:55:11.141,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/168. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:11.142,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",168,replica,0} [views:debug,2014-08-19T16:55:11.192,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/355. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:11.192,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",355,replica,0} [views:debug,2014-08-19T16:55:11.192,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/536. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:11.192,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",536,active,0} [ns_server:debug,2014-08-19T16:55:11.519,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 353. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:11.519,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 534. Nacking mccouch update. [views:debug,2014-08-19T16:55:11.519,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/353. Updated state: replica (0) [views:debug,2014-08-19T16:55:11.519,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/534. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:11.519,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",353,replica,0} [ns_server:debug,2014-08-19T16:55:11.519,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",534,active,0} [ns_server:debug,2014-08-19T16:55:11.520,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,990,958,1022,762, 730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938,1002, 742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556,982,950, 1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632,600,568, 536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612,580, 548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720,688,656, 624,592,560,986,954,1018,758,726,694,662,630,598,566,534,992,960,764,732,700, 668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940,1004,744,712, 680,648,616,584,552,1023,978,946,1010,718,654,590] [ns_server:debug,2014-08-19T16:55:11.520,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,944,761,697,633,569,1008,995,748,684,620,556,373,982,735, 671,607,543,969,722,658,594,411,956,709,645,581,517,1020,943,760,696,632,568, 385,1007,994,747,683,619,555,981,734,670,606,423,359,968,721,657,593,529,955, 708,644,580,397,1019,942,759,695,631,567,1006,993,746,682,618,554,371,980, 733,669,605,541,967,720,656,592,409,954,707,643,579,515,1018,941,758,694,630, 566,383,1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527, 953,706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369, 978,731,667,603,539,965,718,654,590,407,952,705,641,577,513,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589, 525,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614,550, 
367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754,690, 626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651,587, 523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612,548, 365,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999,752, 688,624,560,377,986,739,675,611,547,973,726,662,598,415,960,713,649,585,521, 947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610,363,972,725, 661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997,686,558, 375,737,609,971,660,711,583,1022,945,762,634,996,685,557,736,608,425,970,659, 531,710,582,399,1021] [ns_server:debug,2014-08-19T16:55:11.586,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 166. Nacking mccouch update. [views:debug,2014-08-19T16:55:11.586,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/166. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:11.586,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",166,replica,0} [ns_server:debug,2014-08-19T16:55:11.587,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,754,722,690,658,626,594,562,530,988,956,398,366,1020,760,728, 696,664,632,600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542, 968,410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352, 1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688, 656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,168, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706, 674,642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552, 520,1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678, 614,550,976,418,354,716,652,588,524,950,392,1014] [views:debug,2014-08-19T16:55:11.686,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/534. Updated state: active (0) [views:debug,2014-08-19T16:55:11.686,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/353. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:11.687,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",534,active,0} [ns_server:debug,2014-08-19T16:55:11.687,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",353,replica,0} [views:debug,2014-08-19T16:55:11.728,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/166. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:11.728,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",166,replica,0} [ns_server:debug,2014-08-19T16:55:12.048,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 532. Nacking mccouch update. [views:debug,2014-08-19T16:55:12.048,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/532. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:12.049,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",532,active,0} [ns_server:debug,2014-08-19T16:55:12.049,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,532,990,958,1022, 762,730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,988,956,1020,760,728,696,664,632, 600,568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644, 612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720, 688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,534,992,960,764, 732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940,1004, 744,712,680,648,616,584,552,1023,978,946,1010,718,654,590] [ns_server:debug,2014-08-19T16:55:12.065,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 351. Nacking mccouch update. [views:debug,2014-08-19T16:55:12.065,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/351. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:12.065,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",351,replica,0} [ns_server:debug,2014-08-19T16:55:12.066,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,944,761,697,633,569,1008,995,748,684,620,556,373,982,735, 671,607,543,969,722,658,594,411,956,709,645,581,517,1020,943,760,696,632,568, 385,1007,994,747,683,619,555,981,734,670,606,423,359,968,721,657,593,529,955, 708,644,580,397,1019,942,759,695,631,567,1006,993,746,682,618,554,371,980, 733,669,605,541,967,720,656,592,409,954,707,643,579,515,1018,941,758,694,630, 566,383,1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527, 953,706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369, 978,731,667,603,539,965,718,654,590,407,952,705,641,577,513,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589, 525,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614,550, 367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754,690, 626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651,587, 523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612,548, 365,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999,752, 688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960,713,649,585, 521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610,363,972, 725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997,686, 558,375,737,609,971,660,711,583,1022,945,762,634,996,685,557,736,608,425,970, 659,531,710,582,399,1021] [ns_server:debug,2014-08-19T16:55:12.082,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 164. Nacking mccouch update. [views:debug,2014-08-19T16:55:12.082,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/164. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:12.083,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",164,replica,0} [ns_server:debug,2014-08-19T16:55:12.083,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,754,722,690,658,626,594,562,530,164,988,956,398,366,1020,760, 728,696,664,632,600,568,536,170,994,962,404,372,766,734,702,670,638,606,574, 542,968,410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352, 1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688, 656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,168, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706, 674,642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552, 520,1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678, 614,550,976,418,354,716,652,588,524,950,392,1014] [ns_server:debug,2014-08-19T16:55:12.111,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"tiles">>,<<"maps_1_8_tiles">>,<<"maps_1_8_metahash">>,<<"default">>] [ns_server:info,2014-08-19T16:55:12.114,ns_1@10.242.238.90:<0.13444.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `tiles` since at least database `tiles/100` seems to be missing. [views:debug,2014-08-19T16:55:12.116,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/532. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:12.116,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",532,active,0} [ns_server:info,2014-08-19T16:55:12.117,ns_1@10.242.238.90:<0.13445.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `maps_1_8_tiles` since at least database `maps_1_8_tiles/100` seems to be missing. [ns_server:info,2014-08-19T16:55:12.122,ns_1@10.242.238.90:<0.13446.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `maps_1_8_metahash` since at least database `maps_1_8_metahash/100` seems to be missing. [ns_server:info,2014-08-19T16:55:12.126,ns_1@10.242.238.90:<0.13447.1>:compaction_daemon:try_to_cleanup_indexes:650]Cleaning up indexes for bucket `default` [ns_server:info,2014-08-19T16:55:12.126,ns_1@10.242.238.90:<0.13447.1>:compaction_daemon:spawn_bucket_compactor:609]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:55:12.131,ns_1@10.242.238.90:<0.13450.1>:compaction_daemon:bucket_needs_compaction:1042]`default` data size is 71307, disk size is 8165934 [ns_server:debug,2014-08-19T16:55:12.131,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:55:12.133,ns_1@10.242.238.90:compaction_daemon<0.17567.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [views:debug,2014-08-19T16:55:12.150,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/351. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:12.150,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",351,replica,0} [views:debug,2014-08-19T16:55:12.166,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/164. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:12.167,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",164,replica,0} [ns_server:debug,2014-08-19T16:55:12.300,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 530. Nacking mccouch update. [views:debug,2014-08-19T16:55:12.301,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/530. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:12.301,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",530,active,0} [ns_server:debug,2014-08-19T16:55:12.301,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,532,990,958,1022, 762,730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664, 632,600,568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676, 644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752, 720,688,656,624,592,560,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940, 1004,744,712,680,648,616,584,552,1023,978,946,1010,718,654,590] [ns_server:debug,2014-08-19T16:55:12.342,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 349. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:12.342,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 162. Nacking mccouch update. [views:debug,2014-08-19T16:55:12.342,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/349. Updated state: replica (0) [views:debug,2014-08-19T16:55:12.342,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/162. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:12.343,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",349,replica,0} [ns_server:debug,2014-08-19T16:55:12.343,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",162,replica,0} [ns_server:debug,2014-08-19T16:55:12.343,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,754,722,690,658,626,594,562,530,164,988,956,398,366,1020,760, 728,696,664,632,600,568,536,170,994,962,404,372,766,734,702,670,638,606,574, 542,968,410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352, 1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688, 656,624,592,560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534, 168,992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738, 706,674,642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584, 552,520,1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692, 628,564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742, 678,614,550,976,418,354,716,652,588,524,950,392,1014] [ns_server:debug,2014-08-19T16:55:12.344,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,944,761,697,633,569,1008,995,748,684,620,556,373,982,735, 671,607,543,969,722,658,594,411,956,709,645,581,517,1020,943,760,696,632,568, 385,1007,994,747,683,619,555,981,734,670,606,423,359,968,721,657,593,529,955, 708,644,580,397,1019,942,759,695,631,567,1006,993,746,682,618,554,371,980, 733,669,605,541,967,720,656,592,409,954,707,643,579,515,1018,941,758,694,630, 566,383,1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527, 953,706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369, 978,731,667,603,539,965,718,654,590,407,952,705,641,577,513,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589, 525,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614,550, 367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754,690, 626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651,587, 523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612,548, 365,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999,752, 688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960,713,649,585, 521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610,363,972, 725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997,686, 558,375,737,609,971,660,349,711,583,1022,945,762,634,996,685,557,736,608,425, 970,659,531,710,582,399,1021] [views:debug,2014-08-19T16:55:12.430,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/530. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:12.430,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",530,active,0} [views:debug,2014-08-19T16:55:12.443,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/349. Updated state: replica (0) [views:debug,2014-08-19T16:55:12.443,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/162. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:12.443,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",349,replica,0} [ns_server:debug,2014-08-19T16:55:12.443,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",162,replica,0} [ns_server:debug,2014-08-19T16:55:12.721,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 160. Nacking mccouch update. [views:debug,2014-08-19T16:55:12.721,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/160. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:12.721,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",160,replica,0} [ns_server:debug,2014-08-19T16:55:12.722,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,722,658,594,530,164,988,956,398,366,1020,760,728,696,664,632, 600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410,378, 346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006,746,714, 682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688,656,624,592,560, 528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960,402, 370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642,610, 578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678,614, 550,976,418,354,716,652,588,524,950,392,1014,754,690,626,562] [ns_server:debug,2014-08-19T16:55:12.738,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 347. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:12.738,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 528. Nacking mccouch update. [views:debug,2014-08-19T16:55:12.738,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/347. Updated state: replica (0) [views:debug,2014-08-19T16:55:12.738,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/528. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:12.738,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",347,replica,0} [ns_server:debug,2014-08-19T16:55:12.738,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",528,active,0} [ns_server:debug,2014-08-19T16:55:12.739,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,532,990,958,1022, 762,730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664, 632,600,568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676, 644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752, 720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992, 960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972, 940,1004,744,712,680,648,616,584,552,1023,978,946,1010,718,654,590] [ns_server:debug,2014-08-19T16:55:12.740,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,944,761,697,633,569,1008,995,748,684,620,556,373,982,735, 671,607,543,969,722,658,594,411,347,956,709,645,581,517,1020,943,760,696,632, 568,385,1007,994,747,683,619,555,981,734,670,606,423,359,968,721,657,593,529, 955,708,644,580,397,1019,942,759,695,631,567,1006,993,746,682,618,554,371, 980,733,669,605,541,967,720,656,592,409,954,707,643,579,515,1018,941,758,694, 630,566,383,1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591, 527,953,706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552, 369,978,731,667,603,539,965,718,654,590,407,952,705,641,577,513,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653, 589,525,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612, 548,365,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999, 752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960,713,649, 585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610,363, 972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997, 686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996,685,557,736,608, 425,970,659,531,710,582,399,1021] [views:debug,2014-08-19T16:55:12.839,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/160. Updated state: replica (0) [views:debug,2014-08-19T16:55:12.839,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/347. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:12.839,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",160,replica,0} [views:debug,2014-08-19T16:55:12.839,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/528. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:12.839,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",347,replica,0} [ns_server:debug,2014-08-19T16:55:12.840,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",528,active,0} [ns_server:debug,2014-08-19T16:55:13.131,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 158. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:13.131,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 526. Nacking mccouch update. [views:debug,2014-08-19T16:55:13.132,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/158. Updated state: replica (0) [views:debug,2014-08-19T16:55:13.132,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/526. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:13.132,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",526,active,0} [ns_server:debug,2014-08-19T16:55:13.132,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",158,replica,0} [ns_server:debug,2014-08-19T16:55:13.132,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,952,1016,756,724,692,660,628,596,564,532,990,958,1022, 762,730,698,666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938, 1002,742,710,678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556, 982,950,1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664, 632,600,568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676, 644,612,580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752, 720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992, 960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972, 940,1004,744,712,680,648,616,584,552,1023,978,946,1010,718,654,590,526] [ns_server:debug,2014-08-19T16:55:13.132,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,722,658,594,530,164,988,956,398,366,1020,760,728,696,664,632, 600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410,378, 346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006,746,714, 682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688,656,624,592,560, 528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960,402, 370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642,610, 
578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678,614, 550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562] [ns_server:debug,2014-08-19T16:55:13.173,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 345. Nacking mccouch update. [views:debug,2014-08-19T16:55:13.173,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/345. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:13.174,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",345,replica,0} [ns_server:debug,2014-08-19T16:55:13.174,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,944,761,697,633,569,1008,995,748,684,620,556,373,982,735, 671,607,543,969,722,658,594,411,347,956,709,645,581,517,1020,943,760,696,632, 568,385,1007,994,747,683,619,555,981,734,670,606,423,359,968,721,657,593,529, 955,708,644,580,397,1019,942,759,695,631,567,1006,993,746,682,618,554,371, 980,733,669,605,541,967,720,656,592,409,345,954,707,643,579,515,1018,941,758, 694,630,566,383,1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655, 591,527,953,706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616, 552,369,978,731,667,603,539,965,718,654,590,407,952,705,641,577,513,1016,939, 756,692,628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717, 653,589,525,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678, 614,550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014, 754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715, 651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676, 612,548,365,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012, 999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960,713, 649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610, 363,972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010, 997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996,685,557,736, 608,425,970,659,531,710,582,399,1021] [views:debug,2014-08-19T16:55:13.224,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/526. Updated state: active (0) [views:debug,2014-08-19T16:55:13.224,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/158. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:13.224,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",526,active,0} [ns_server:debug,2014-08-19T16:55:13.224,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",158,replica,0} [views:debug,2014-08-19T16:55:13.312,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/345. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:13.312,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",345,replica,0} [ns_server:debug,2014-08-19T16:55:13.454,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 156. Nacking mccouch update. [views:debug,2014-08-19T16:55:13.454,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/156. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:13.454,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",156,replica,0} [ns_server:debug,2014-08-19T16:55:13.455,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,722,658,594,530,164,988,956,398,366,1020,760,728,696,664,632, 600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410,378, 346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006,746,714, 682,650,618,586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592, 560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960, 402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642, 610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678, 614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562] [ns_server:debug,2014-08-19T16:55:13.512,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 524. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:13.512,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 343. Nacking mccouch update. [views:debug,2014-08-19T16:55:13.512,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/524. Updated state: active (0) [views:debug,2014-08-19T16:55:13.513,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/343. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:13.513,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",524,active,0} [ns_server:debug,2014-08-19T16:55:13.513,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",343,replica,0} [ns_server:debug,2014-08-19T16:55:13.513,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,756,724,692,660,628,596,564,532,990,958,1022,762,730,698, 666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938,1002,742,710, 678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556,524,982,950, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,974,942,1006,746,714,682,650,618,586,554,980,948,1012,752,720,688, 656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960,764, 732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940,1004, 744,712,680,648,616,584,552,1023,978,946,1010,718,654,590,526,952,1016] [ns_server:debug,2014-08-19T16:55:13.514,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,697,569,1008,995,748,684,620,556,373,982,735,671,607,543, 969,722,658,594,411,347,956,709,645,581,517,1020,943,760,696,632,568,385, 1007,994,747,683,619,555,981,734,670,606,423,359,968,721,657,593,529,955,708, 644,580,397,1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733, 669,605,541,967,720,656,592,409,345,954,707,643,579,515,1018,941,758,694,630, 566,383,1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527, 953,706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369, 978,731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653, 589,525,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612, 548,365,974,727,663,599,535,961,714,650,586,403,948,765,701,637,573,1012,999, 752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960,713,649, 585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610,363, 972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010,997, 686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996,685,557,736,608, 425,970,659,531,710,582,399,1021,944,761,633] [views:debug,2014-08-19T16:55:13.580,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/156. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:13.580,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",156,replica,0} [views:debug,2014-08-19T16:55:13.638,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/524. 
Updated state: active (0) [views:debug,2014-08-19T16:55:13.638,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/343. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:13.638,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",524,active,0} [ns_server:debug,2014-08-19T16:55:13.639,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",343,replica,0} [ns_server:debug,2014-08-19T16:55:13.989,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 154. Nacking mccouch update. [views:debug,2014-08-19T16:55:13.989,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/154. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:13.989,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",154,replica,0} [ns_server:debug,2014-08-19T16:55:13.990,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,722,658,594,530,164,988,956,398,366,1020,760,728,696,664,632, 600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410,378, 346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006,746,714, 682,650,618,586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592, 560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960, 402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642, 610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678, 614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562] [ns_server:debug,2014-08-19T16:55:14.098,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 169. Nacking mccouch update. [views:debug,2014-08-19T16:55:14.098,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/169. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:14.098,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",169,replica,0} [views:debug,2014-08-19T16:55:14.098,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/154. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:14.098,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",154,replica,0} [ns_server:debug,2014-08-19T16:55:14.099,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,697,569,1008,995,748,684,620,556,373,982,735,671,607,543, 969,722,658,594,411,347,956,709,645,581,517,1020,943,760,696,632,568,385, 1007,994,747,683,619,555,981,734,670,606,423,359,968,721,657,593,529,955,708, 644,580,397,1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733, 669,605,541,967,720,656,592,409,345,954,707,643,579,515,1018,941,758,694,630, 566,383,1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527, 953,706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369, 978,731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653, 589,525,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612, 548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573,1012, 999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960,713, 649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610, 363,972,725,661,597,533,959,712,648,584,401,1023,946,763,699,635,571,1010, 997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996,685,557,736, 608,425,970,659,531,710,582,399,1021,944,761,633] [ns_server:debug,2014-08-19T16:55:14.131,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 522. Nacking mccouch update. [views:debug,2014-08-19T16:55:14.131,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/522. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:14.131,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",522,active,0} [ns_server:debug,2014-08-19T16:55:14.132,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,756,724,692,660,628,596,564,532,990,958,1022,762,730,698, 666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938,1002,742,710, 678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556,524,982,950, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752,720, 688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940, 1004,744,712,680,648,616,584,552,1023,978,946,1010,718,654,590,526,952,1016] [views:debug,2014-08-19T16:55:14.233,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/169. 
Updated state: replica (0) [views:debug,2014-08-19T16:55:14.233,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/522. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:14.233,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",169,replica,0} [ns_server:debug,2014-08-19T16:55:14.234,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",522,active,0} [ns_server:debug,2014-08-19T16:55:14.300,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 152. Nacking mccouch update. [views:debug,2014-08-19T16:55:14.300,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/152. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:14.300,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",152,replica,0} [ns_server:debug,2014-08-19T16:55:14.301,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748,684, 620,556,982,424,360,722,658,594,530,164,988,956,398,366,1020,760,728,696,664, 632,600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410, 378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006,746, 714,682,650,618,586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624, 592,560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992, 960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674, 642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 154,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692, 628,564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742, 678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562] [views:debug,2014-08-19T16:55:14.334,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/152. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:14.334,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",152,replica,0} [ns_server:debug,2014-08-19T16:55:14.384,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 520. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:14.384,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 167. Nacking mccouch update. [views:debug,2014-08-19T16:55:14.384,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/520. Updated state: active (0) [views:debug,2014-08-19T16:55:14.384,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/167. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:14.384,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",520,active,0} [ns_server:debug,2014-08-19T16:55:14.385,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",167,replica,0} [ns_server:debug,2014-08-19T16:55:14.385,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,756,724,692,660,628,596,564,532,990,958,1022,762,730,698, 666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938,1002,742,710, 678,646,614,582,550,976,944,1008,748,716,684,652,620,588,556,524,982,950, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752,720, 688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940, 1004,744,712,680,648,616,584,552,520,1023,978,946,1010,718,654,590,526,952, 1016] [ns_server:debug,2014-08-19T16:55:14.385,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,697,569,1008,995,748,684,620,556,373,982,735,671,607,543, 969,722,658,594,411,347,956,709,645,581,517,1020,943,760,696,632,568,385, 1007,994,747,683,619,555,981,734,670,606,423,359,968,721,657,593,529,955,708, 644,580,397,1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733, 669,605,541,967,720,656,592,409,345,954,707,643,579,515,1018,941,758,694,630, 566,383,1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527, 953,706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369, 978,731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653, 589,525,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612, 548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573,1012, 999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960,713, 649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610, 363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699,635,571, 1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996,685, 557,736,608,425,970,659,531,710,582,399,1021,944,761,633] [views:debug,2014-08-19T16:55:14.586,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/167. Updated state: replica (0) [views:debug,2014-08-19T16:55:14.586,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/520. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:14.586,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",167,replica,0} [ns_server:debug,2014-08-19T16:55:14.587,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",520,active,0} [ns_server:debug,2014-08-19T16:55:14.661,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 150. Nacking mccouch update. [views:debug,2014-08-19T16:55:14.661,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/150. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:14.662,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",150,replica,0} [ns_server:debug,2014-08-19T16:55:14.662,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748,684, 620,556,982,424,360,722,658,594,530,164,956,398,1020,760,728,696,664,632,600, 568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410,378,346, 1000,740,708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714, 682,650,618,586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592, 560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960, 402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642, 610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678, 614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562,988,366] [views:debug,2014-08-19T16:55:14.712,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/150. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:14.712,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",150,replica,0} [ns_server:debug,2014-08-19T16:55:14.840,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 518. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:14.840,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 165. Nacking mccouch update. [views:debug,2014-08-19T16:55:14.840,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/518. Updated state: active (0) [views:debug,2014-08-19T16:55:14.840,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/165. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:14.840,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",518,active,0} [ns_server:debug,2014-08-19T16:55:14.841,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",165,replica,0} [ns_server:debug,2014-08-19T16:55:14.841,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,756,724,692,660,628,596,564,532,990,958,1022,762,730,698, 666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938,1002,742,710, 678,646,614,582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752,720, 688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972,940, 1004,744,712,680,648,616,584,552,520,1023,978,946,1010,718,654,590,526,952, 1016] [ns_server:debug,2014-08-19T16:55:14.841,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,697,569,1008,995,748,684,620,556,373,982,735,671,607,543, 969,722,658,594,411,347,956,709,645,581,517,1020,943,760,696,632,568,385, 1007,994,747,683,619,555,981,734,670,606,423,359,968,721,657,593,529,955,708, 644,580,397,1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733, 669,605,541,967,720,656,592,409,345,954,707,643,579,515,1018,941,758,694,630, 566,383,1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527, 953,706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369, 978,731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653, 589,525,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612, 548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573,1012, 999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960,713, 649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610, 363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699,635,571, 1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996,685, 557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633] [views:debug,2014-08-19T16:55:14.949,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/165. Updated state: replica (0) [views:debug,2014-08-19T16:55:14.949,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/518. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:14.952,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",165,replica,0} [ns_server:debug,2014-08-19T16:55:14.952,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",518,active,0} [ns_server:debug,2014-08-19T16:55:14.982,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 148. Nacking mccouch update. [views:debug,2014-08-19T16:55:14.983,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/148. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:14.983,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",148,replica,0} [ns_server:debug,2014-08-19T16:55:14.983,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748,684, 620,556,982,424,360,722,658,594,530,164,956,398,1020,760,728,696,664,632,600, 568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410,378,346, 1000,740,708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714, 682,650,618,586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592, 560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960, 402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642, 610,578,546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 154,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692, 628,564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742, 678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562,988, 366] [views:debug,2014-08-19T16:55:15.094,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/148. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:15.094,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",148,replica,0} [ns_server:debug,2014-08-19T16:55:15.417,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 516. Nacking mccouch update. [views:debug,2014-08-19T16:55:15.417,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/516. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:15.417,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",516,active,0} [ns_server:debug,2014-08-19T16:55:15.418,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,756,724,692,660,628,596,564,532,990,958,1022,762,730,698, 666,634,602,570,538,996,964,736,704,672,640,608,576,544,970,938,1002,742,710, 678,646,614,582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752, 720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992, 960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,972, 940,1004,744,712,680,648,616,584,552,520,1023,978,946,1010,718,654,590,526, 952,1016] [ns_server:debug,2014-08-19T16:55:15.459,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 163. Nacking mccouch update. [views:debug,2014-08-19T16:55:15.459,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/163. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:15.460,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",163,replica,0} [ns_server:debug,2014-08-19T16:55:15.460,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,697,569,1008,995,748,684,620,556,373,982,735,671,607,543, 969,722,658,594,411,347,956,709,645,581,517,1020,943,760,696,632,568,385, 1007,994,747,683,619,555,981,734,670,606,423,359,968,721,657,593,529,163,955, 708,644,580,397,1019,942,759,695,631,567,1006,993,746,682,618,554,371,980, 733,669,605,541,967,720,656,592,409,345,954,707,643,579,515,1018,941,758,694, 630,566,383,1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591, 527,953,706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552, 369,978,731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,1016,939, 756,692,628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717, 653,589,525,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678, 614,550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014, 754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715, 651,587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676, 612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573, 1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960, 713,649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674, 610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699,635,571, 1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996,685, 557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633] [views:debug,2014-08-19T16:55:15.535,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/516. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:15.535,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",516,active,0} [views:debug,2014-08-19T16:55:15.593,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/163. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:15.594,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",163,replica,0} [ns_server:debug,2014-08-19T16:55:15.651,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 146. Nacking mccouch update. [views:debug,2014-08-19T16:55:15.652,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/146. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:15.652,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",146,replica,0} [ns_server:debug,2014-08-19T16:55:15.652,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748,684, 620,556,982,424,360,722,658,594,530,164,956,398,1020,760,728,696,664,632,600, 568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410,378,346, 1000,740,708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714, 682,650,618,586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592, 560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960, 402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642, 610,578,546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 154,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692, 628,564,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938,380,1002, 742,678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562, 988,366] [views:debug,2014-08-19T16:55:15.721,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/146. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:15.721,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",146,replica,0} [ns_server:debug,2014-08-19T16:55:15.737,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 514. Nacking mccouch update. [views:debug,2014-08-19T16:55:15.737,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/514. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:15.738,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",514,active,0} [ns_server:debug,2014-08-19T16:55:15.738,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,724,660,596,532,990,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,970,938,1002,742,710,678,646,614,582, 550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950,1014,754,722, 690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994,962, 766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612,580,548,516,974, 942,1006,746,714,682,650,618,586,554,522,980,948,1012,752,720,688,656,624, 592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960,764,732,700, 668,636,604,572,540,998,966,738,706,674,642,610,578,546,514,972,940,1004,744, 712,680,648,616,584,552,520,1023,978,946,1010,718,654,590,526,952,1016,756, 692,628,564] [ns_server:debug,2014-08-19T16:55:15.805,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 161. Nacking mccouch update. [views:debug,2014-08-19T16:55:15.805,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/161. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:15.805,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",161,replica,0} [views:debug,2014-08-19T16:55:15.805,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/514. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:15.806,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",514,active,0} [ns_server:debug,2014-08-19T16:55:15.806,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,697,569,1008,748,620,982,735,671,607,543,969,722,658,594, 411,347,956,709,645,581,517,1020,943,760,696,632,568,385,1007,994,747,683, 619,555,981,734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397, 1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733,669,605,541, 967,720,656,592,409,345,954,707,643,579,515,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953, 706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369,978, 731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589, 525,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614,550, 367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754,690, 626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651,587, 523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612,548, 365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573,1012,999, 752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960,713,649, 585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610,363, 972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699,635,571,1010, 997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996,685,557,736, 608,425,970,659,531,165,710,582,399,1021,944,761,633,995,684,556,373] [views:debug,2014-08-19T16:55:15.856,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/161. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:15.856,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",161,replica,0} [ns_server:debug,2014-08-19T16:55:15.889,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 144. Nacking mccouch update. [views:debug,2014-08-19T16:55:15.889,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/144. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:15.889,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",144,replica,0} [ns_server:debug,2014-08-19T16:55:15.890,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748,684, 620,556,982,424,360,722,658,594,530,164,956,398,1020,760,728,696,664,632,600, 568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,144,968,410,378, 346,1000,740,708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746, 714,682,650,618,586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624, 592,560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992, 960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674, 642,610,578,546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552, 520,154,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756, 692,628,564,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938,380, 1002,742,678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690, 626,562,988,366] [views:debug,2014-08-19T16:55:16.048,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/144. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:16.048,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",144,replica,0} [ns_server:debug,2014-08-19T16:55:16.098,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 512. Nacking mccouch update. [views:debug,2014-08-19T16:55:16.099,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/512. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:16.099,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",512,active,0} [ns_server:debug,2014-08-19T16:55:16.099,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,724,660,596,532,990,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646,614, 582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950,1014,754, 722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994, 962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612,580,548,516, 974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752,720,688,656, 624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960,764,732, 700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,514,972,940,1004, 744,712,680,648,616,584,552,520,1023,978,946,1010,718,654,590,526,952,1016, 756,692,628,564] [ns_server:debug,2014-08-19T16:55:16.132,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 159. Nacking mccouch update. [views:debug,2014-08-19T16:55:16.132,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/159. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:16.132,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",159,replica,0} [ns_server:debug,2014-08-19T16:55:16.133,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,697,569,1008,748,620,982,735,671,607,543,969,722,658,594, 411,347,956,709,645,581,517,1020,943,760,696,632,568,385,1007,994,747,683, 619,555,981,734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397, 1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733,669,605,541, 967,720,656,592,409,345,954,707,643,579,515,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953, 706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369,978, 731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589, 525,159,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612, 548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573,1012, 999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960,713, 649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674,610, 363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699,635,571, 1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996,685, 557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633,995,684,556,373] [views:debug,2014-08-19T16:55:16.149,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/512. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:16.149,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",512,active,0} [ns_server:debug,2014-08-19T16:55:16.285,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 142. Nacking mccouch update. [views:debug,2014-08-19T16:55:16.286,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/142. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:16.286,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",142,replica,0} [ns_server:debug,2014-08-19T16:55:16.286,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748,684, 620,556,982,424,360,722,658,594,530,164,956,398,1020,760,728,696,664,632,600, 568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,144,968,410,378, 346,1000,740,708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746, 714,682,650,618,586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624, 592,560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992, 960,402,370,764,732,700,668,636,604,572,540,142,998,966,408,376,344,738,706, 674,642,610,578,546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584, 552,520,154,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016, 756,692,628,564,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938, 380,1002,742,678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754, 690,626,562,988,366] [views:debug,2014-08-19T16:55:16.311,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/159. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:16.311,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",159,replica,0} [views:debug,2014-08-19T16:55:16.395,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/142. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:16.395,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",142,replica,0} [ns_server:debug,2014-08-19T16:55:16.454,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 426. Nacking mccouch update. [views:debug,2014-08-19T16:55:16.454,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/426. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:16.455,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",426,replica,0} [ns_server:debug,2014-08-19T16:55:16.455,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,990,958,1022,762,730,698,666,634,602, 570,538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646, 614,582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950,1014, 754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536, 994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612,580,548, 516,974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752,720,688, 656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960,764, 732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,514,972,940, 1004,744,712,680,648,616,584,552,520,1023,978,946,1010,718,654,590,526,952, 1016,756,692,628,564] [views:debug,2014-08-19T16:55:16.747,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/426. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:16.747,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",426,replica,0} [ns_server:debug,2014-08-19T16:55:16.780,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 157. Nacking mccouch update. [views:debug,2014-08-19T16:55:16.780,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/157. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:16.780,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",157,replica,0} [ns_server:debug,2014-08-19T16:55:16.781,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,697,569,1008,748,620,982,735,671,607,543,969,722,658,594, 411,347,956,709,645,581,517,1020,943,760,696,632,568,385,1007,994,747,683, 619,555,981,734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397, 1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733,669,605,541, 967,720,656,592,409,345,954,707,643,579,515,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953, 706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369,978, 731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589, 525,159,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,157,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676, 612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573, 1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960, 713,649,585,521,947,764,700,636,572,389,1011,998,751,687,623,559,985,738,674, 610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699,635,571, 1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996,685, 557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633,995,684,556,373] [ns_server:debug,2014-08-19T16:55:16.847,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 140. Nacking mccouch update. [views:debug,2014-08-19T16:55:16.847,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/140. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:16.847,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",140,replica,0} [ns_server:debug,2014-08-19T16:55:16.848,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 140,996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748, 684,620,556,982,424,360,722,658,594,530,164,956,398,1020,760,696,632,568,994, 962,404,372,766,734,702,670,638,606,574,542,144,968,410,378,346,1000,740,708, 676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714,682,650,618,586, 554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162,986, 954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960,402,370,764,732, 700,668,636,604,572,540,142,998,966,408,376,344,738,706,674,642,610,578,546, 514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154,1023, 978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,342,704,640,576,512,146,938,380,1002,742,678, 614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562,988,366, 728,664,600,536,170] [views:debug,2014-08-19T16:55:16.931,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/157. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:16.931,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",157,replica,0} [views:debug,2014-08-19T16:55:16.998,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/140. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:16.998,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",140,replica,0} [ns_server:debug,2014-08-19T16:55:17.140,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 424. Nacking mccouch update. [views:debug,2014-08-19T16:55:17.141,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/424. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:17.141,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",424,replica,0} [ns_server:debug,2014-08-19T16:55:17.141,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,990,958,1022,762,730,698,666,634,602, 570,538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646, 614,582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950,424, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,1006,746,714,682,650,618,586,554,522,980,948,1012,752, 720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992, 960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,514, 972,940,1004,744,712,680,648,616,584,552,520,1023,978,946,1010,718,654,590, 526,952,1016,756,692,628,564] [views:debug,2014-08-19T16:55:17.174,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/424. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:17.174,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",424,replica,0} [ns_server:debug,2014-08-19T16:55:17.233,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 155. Nacking mccouch update. [views:debug,2014-08-19T16:55:17.233,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/155. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:17.233,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 138. Nacking mccouch update. [views:debug,2014-08-19T16:55:17.233,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/138. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:17.233,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",155,replica,0} [ns_server:debug,2014-08-19T16:55:17.234,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",138,replica,0} [ns_server:debug,2014-08-19T16:55:17.234,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 140,996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748, 684,620,556,982,424,360,722,658,594,530,164,956,398,1020,760,696,632,568,138, 994,962,404,372,766,734,702,670,638,606,574,542,144,968,410,378,346,1000,740, 708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714,682,650,618, 586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162, 986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960,402,370,764, 732,700,668,636,604,572,540,142,998,966,408,376,344,738,706,674,642,610,578, 546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938,380,1002,742, 678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562,988, 366,728,664,600,536,170] [ns_server:debug,2014-08-19T16:55:17.234,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,698,570,387,1009,749,621,983,672, 361,723,595,957,646,697,569,1008,748,620,982,735,671,607,543,969,722,658,594, 411,347,956,709,645,581,517,1020,943,760,696,632,568,385,1007,994,747,683, 619,555,981,734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397, 1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733,669,605,541, 967,720,656,592,409,345,954,707,643,579,515,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953, 706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369,978, 731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589, 525,159,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,157,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676, 612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573, 1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960, 713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,985,738, 674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699,635, 571,1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996, 685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633,995,684,556, 373] [views:debug,2014-08-19T16:55:17.300,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/155. 
Updated state: replica (0) [views:debug,2014-08-19T16:55:17.301,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/138. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:17.301,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",155,replica,0} [ns_server:debug,2014-08-19T16:55:17.301,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",138,replica,0} [ns_server:debug,2014-08-19T16:55:17.401,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 422. Nacking mccouch update. [views:debug,2014-08-19T16:55:17.401,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/422. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:17.401,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",422,replica,0} [ns_server:debug,2014-08-19T16:55:17.401,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,990,958,1022,762,730,698,666,634,602, 570,538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646, 614,582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950,424, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534, 992,960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546, 514,972,940,1004,744,712,680,648,616,584,552,520,1023,978,946,1010,718,654, 590,526,952,1016,756,692,628,564] [views:debug,2014-08-19T16:55:17.452,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/422. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:17.452,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",422,replica,0} [ns_server:debug,2014-08-19T16:55:17.660,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 136. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:17.660,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 153. Nacking mccouch update. [views:debug,2014-08-19T16:55:17.660,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/136. Updated state: replica (0) [views:debug,2014-08-19T16:55:17.661,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/153. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:17.661,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",136,replica,0} [ns_server:debug,2014-08-19T16:55:17.661,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",153,replica,0} [ns_server:debug,2014-08-19T16:55:17.661,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 140,996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748, 684,620,556,982,424,360,722,658,594,530,164,956,398,1020,760,696,632,568,138, 994,962,404,372,766,734,702,670,638,606,574,542,144,968,410,378,346,1000,740, 708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714,682,650,618, 586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162, 986,954,396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402,370, 764,732,700,668,636,604,572,540,142,998,966,408,376,344,738,706,674,642,610, 578,546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938,380,1002,742, 678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562,988, 366,728,664,600,536,170] [ns_server:debug,2014-08-19T16:55:17.662,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,983, 672,361,723,595,957,646,697,569,1008,748,620,982,735,671,607,543,969,722,658, 594,411,347,956,709,645,581,517,1020,943,760,696,632,568,385,1007,994,747, 683,619,555,981,734,670,606,423,359,968,721,657,593,529,163,955,708,644,580, 397,1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733,669,605, 541,967,720,656,592,409,345,954,707,643,579,515,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953, 706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369,978, 731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589, 525,159,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,157,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676, 612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573, 1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960, 713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,985,738, 674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699,635, 571,1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996, 685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633,995,684,556, 373] [ns_server:debug,2014-08-19T16:55:17.720,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 420. Nacking mccouch update. 
[views:debug,2014-08-19T16:55:17.720,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/420. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:17.720,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",420,replica,0} [views:debug,2014-08-19T16:55:17.721,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/153. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:17.721,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",153,replica,0} [views:debug,2014-08-19T16:55:17.721,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/136. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:17.721,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",136,replica,0} [ns_server:debug,2014-08-19T16:55:17.721,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646,614, 582,550,518,976,944,1008,748,716,684,652,620,588,556,524,982,950,424,1014, 754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536, 994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612,580,548, 516,974,942,1006,746,714,682,650,618,586,554,522,980,948,422,1012,752,720, 688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,514,972, 940,1004,744,712,680,648,616,584,552,520,1023,978,946,420,1010,718,654,590, 526,952,1016,756,692,628,564,990] [views:debug,2014-08-19T16:55:17.832,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/420. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:17.833,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",420,replica,0} [ns_server:debug,2014-08-19T16:55:17.991,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 134. Nacking mccouch update. [views:debug,2014-08-19T16:55:17.991,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/134. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:17.991,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",134,replica,0} [ns_server:debug,2014-08-19T16:55:17.992,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 140,996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748, 684,620,556,982,424,360,722,658,594,530,164,956,398,1020,760,696,632,568,138, 994,962,404,372,766,734,702,670,638,606,574,542,144,968,410,378,346,1000,740, 708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714,682,650,618, 586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162, 986,954,396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402,370, 764,732,700,668,636,604,572,540,142,998,966,408,376,344,738,706,674,642,610, 578,546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,134,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938,380,1002, 742,678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562, 988,366,728,664,600,536,170] [ns_server:debug,2014-08-19T16:55:18.041,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 418. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:18.041,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 151. Nacking mccouch update. [views:debug,2014-08-19T16:55:18.042,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/418. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.042,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",418,replica,0} [views:debug,2014-08-19T16:55:18.042,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/151. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.042,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",151,replica,0} [ns_server:debug,2014-08-19T16:55:18.042,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646,614, 582,550,518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534, 992,960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546, 514,972,940,1004,744,712,680,648,616,584,552,520,1023,978,946,420,1010,718, 654,590,526,952,1016,756,692,628,564,990] [ns_server:debug,2014-08-19T16:55:18.043,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,983, 672,361,723,595,957,646,697,569,1008,748,620,982,671,543,969,722,658,594,411, 347,956,709,645,581,517,151,1020,943,760,696,632,568,385,1007,994,747,683, 619,555,981,734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397, 1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733,669,605,541, 967,720,656,592,409,345,954,707,643,579,515,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953, 706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369,978, 731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589, 525,159,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,157,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676, 612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573, 1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960, 713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,985,738, 674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699,635, 571,1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996, 685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633,995,684,556, 373,735,607] [views:debug,2014-08-19T16:55:18.109,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/134. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.109,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",134,replica,0} [views:debug,2014-08-19T16:55:18.167,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/418. 
Updated state: replica (0) [views:debug,2014-08-19T16:55:18.168,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/151. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.168,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",418,replica,0} [ns_server:debug,2014-08-19T16:55:18.168,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",151,replica,0} [ns_server:debug,2014-08-19T16:55:18.427,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 132. Nacking mccouch update. [views:debug,2014-08-19T16:55:18.427,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/132. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.427,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",132,replica,0} [ns_server:debug,2014-08-19T16:55:18.428,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 140,996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748, 684,620,556,982,424,360,722,658,594,530,164,956,398,1020,760,696,632,568,138, 994,962,404,372,766,734,702,670,638,606,574,542,144,968,410,378,346,1000,740, 708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714,682,650,618, 586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162, 986,954,396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402,370, 764,732,700,668,636,604,572,540,142,998,966,408,376,344,738,706,674,642,610, 578,546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,134,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938,380,1002, 742,678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562, 132,988,366,728,664,600,536,170] [views:debug,2014-08-19T16:55:18.511,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/132. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.511,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",132,replica,0} [ns_server:debug,2014-08-19T16:55:18.571,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 149. Nacking mccouch update. [views:debug,2014-08-19T16:55:18.571,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/149. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.571,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",149,replica,0} [ns_server:debug,2014-08-19T16:55:18.572,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,983, 672,361,723,595,957,646,697,569,1008,748,620,982,671,543,969,722,658,594,411, 347,956,709,645,581,517,151,1020,943,760,696,632,568,385,1007,994,747,683, 619,555,981,734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397, 1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733,669,605,541, 967,720,656,592,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953, 706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369,978, 731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589, 525,159,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,157,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676, 612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573, 1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960, 713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,985,738, 674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699,635, 571,1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634,996, 685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633,995,684,556, 373,735,607] [ns_server:debug,2014-08-19T16:55:18.587,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 416. Nacking mccouch update. [views:debug,2014-08-19T16:55:18.587,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/416. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.588,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",416,replica,0} [ns_server:debug,2014-08-19T16:55:18.588,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646,614, 582,550,518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422, 1012,752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598, 566,534,992,960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610, 578,546,514,972,940,1004,744,712,680,648,616,584,552,520,1023,978,946,420, 1010,718,654,590,526,952,1016,756,692,628,564,990] [views:debug,2014-08-19T16:55:18.638,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/149. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.638,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",149,replica,0} [views:debug,2014-08-19T16:55:18.655,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/416. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.655,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",416,replica,0} [ns_server:debug,2014-08-19T16:55:18.722,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 130. Nacking mccouch update. [views:debug,2014-08-19T16:55:18.722,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/130. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.722,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",130,replica,0} [ns_server:debug,2014-08-19T16:55:18.723,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 140,996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748, 684,620,556,982,424,360,722,658,594,530,164,956,398,1020,760,696,632,568,138, 994,372,766,734,702,670,638,606,574,542,144,968,410,378,346,1000,740,708,676, 644,612,580,548,516,150,974,942,416,384,352,1006,746,714,682,650,618,586,554, 522,156,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162,130,986, 954,396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402,370,764, 732,700,668,636,604,572,540,142,998,966,408,376,344,738,706,674,642,610,578, 546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,134,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938,380,1002, 742,678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562, 132,988,366,728,664,600,536,170,962,404] [views:debug,2014-08-19T16:55:18.772,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/130. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.773,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",130,replica,0} [ns_server:debug,2014-08-19T16:55:18.823,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 414. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:18.823,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 147. Nacking mccouch update. [views:debug,2014-08-19T16:55:18.823,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/414. Updated state: replica (0) [views:debug,2014-08-19T16:55:18.823,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/147. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.823,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",414,replica,0} [ns_server:debug,2014-08-19T16:55:18.823,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",147,replica,0} [ns_server:debug,2014-08-19T16:55:18.824,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,512,970,938,1002,742,710,678,646,614, 582,550,518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422, 1012,752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598, 566,534,992,960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,1010,718,654,590,526,952,1016,756,692,628,564,990] [ns_server:debug,2014-08-19T16:55:18.824,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,983, 672,361,723,595,957,646,697,569,1008,748,620,982,671,543,969,722,658,594,411, 347,956,709,645,581,517,151,1020,943,760,696,632,568,385,1007,994,747,683, 619,555,981,734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397, 1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733,669,605,541, 967,720,656,592,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953, 706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369,978, 731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,147,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653, 589,525,159,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678, 614,550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,1014, 754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715, 651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740, 676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637, 573,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351, 960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,985, 738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699, 635,571,1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634, 996,685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633,995,684, 556,373,735,607] [views:debug,2014-08-19T16:55:18.874,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/147. Updated state: replica (0) [views:debug,2014-08-19T16:55:18.874,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/414. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:18.874,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",147,replica,0} [ns_server:debug,2014-08-19T16:55:18.874,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",414,replica,0} [ns_server:debug,2014-08-19T16:55:19.043,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 128. Nacking mccouch update. [views:debug,2014-08-19T16:55:19.044,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/128. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:19.044,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",128,replica,0} [ns_server:debug,2014-08-19T16:55:19.044,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,958,400,1022,762,698,634, 570,140,996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008, 748,684,620,556,982,424,360,722,658,594,530,164,956,398,1020,760,696,632,568, 138,994,372,766,734,702,670,638,606,574,542,144,968,410,378,346,1000,740,708, 676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714,682,650,618,586, 554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162,130, 986,954,396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402,370, 764,732,700,668,636,604,572,540,142,998,966,408,376,344,738,706,674,642,610, 578,546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,134,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938,380,1002, 742,678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562, 132,988,366,728,664,600,536,170,962,404] [views:debug,2014-08-19T16:55:19.128,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/128. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:19.128,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",128,replica,0} [ns_server:debug,2014-08-19T16:55:19.270,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 145. Nacking mccouch update. [views:debug,2014-08-19T16:55:19.270,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/145. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:19.270,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",145,replica,0} [ns_server:debug,2014-08-19T16:55:19.270,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 412. Nacking mccouch update. [views:debug,2014-08-19T16:55:19.270,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/412. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:19.270,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",412,replica,0} [ns_server:debug,2014-08-19T16:55:19.271,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,730,698,666,634,602,570, 538,996,964,736,704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646, 614,582,550,518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424, 1014,754,722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600, 568,536,994,962,766,734,702,670,638,606,574,542,968,1000,740,708,676,644,612, 580,548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422, 1012,752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598, 566,534,992,960,764,732,700,668,636,604,572,540,998,966,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,1010,718,654,590,526,952,1016,756,692,628,564,990] [ns_server:debug,2014-08-19T16:55:19.271,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,983, 672,361,723,595,957,646,697,569,1008,748,620,982,671,543,969,722,658,594,411, 347,956,709,645,581,517,151,1020,943,760,696,632,568,385,1007,994,747,683, 619,555,981,734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397, 1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733,669,605,541, 967,720,656,592,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953, 706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369,978, 731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,147,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653, 589,525,159,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678, 614,550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145, 1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353, 962,715,651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,1000, 987,740,676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765, 701,637,573,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598, 415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623, 559,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946, 763,699,635,571,1010,997,686,558,375,737,609,971,660,349,711,583,1022,945, 762,634,996,685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633, 995,684,556,373,735,607] [views:debug,2014-08-19T16:55:19.354,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/412. Updated state: replica (0) [views:debug,2014-08-19T16:55:19.354,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/145. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:19.354,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",412,replica,0} [ns_server:debug,2014-08-19T16:55:19.354,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",145,replica,0} [ns_server:debug,2014-08-19T16:55:19.413,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 126. Nacking mccouch update. [views:debug,2014-08-19T16:55:19.413,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/126. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:19.414,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",126,replica,0} [ns_server:debug,2014-08-19T16:55:19.414,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,958,400,1022,762,698,634, 570,140,996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008, 748,684,620,556,126,982,424,360,722,658,594,530,164,956,398,1020,760,696,632, 568,138,994,372,766,734,702,670,638,606,574,542,144,968,410,378,346,1000,740, 708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714,682,650,618, 586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162, 130,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402, 370,764,732,700,668,636,604,572,540,142,998,966,408,376,344,738,706,674,642, 610,578,546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 154,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692, 628,564,134,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938,380, 1002,742,678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690, 626,562,132,988,366,728,664,600,536,170,962,404] [views:debug,2014-08-19T16:55:19.523,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/126. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:19.523,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",126,replica,0} [ns_server:debug,2014-08-19T16:55:19.756,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 143. Nacking mccouch update. [views:debug,2014-08-19T16:55:19.756,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/143. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:19.756,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",143,replica,0} [ns_server:debug,2014-08-19T16:55:19.757,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,983, 672,361,723,595,957,646,697,569,1008,748,620,982,671,543,969,722,658,594,411, 347,956,709,645,581,517,151,1020,943,760,696,632,568,385,1007,994,747,683, 619,555,981,734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397, 1019,942,759,695,631,567,1006,993,746,682,618,554,371,980,733,669,605,541, 967,720,656,592,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953, 706,642,578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369,978, 731,667,603,539,965,718,654,590,407,343,952,705,641,577,513,147,1016,939,756, 692,628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653, 589,525,159,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678, 614,550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145, 1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353, 962,715,651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,1000, 987,740,676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765, 701,637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662, 598,415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023, 946,763,699,635,571,1010,997,686,558,375,737,609,971,660,349,711,583,1022, 945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761, 633,995,684,556,373,735,607] [ns_server:debug,2014-08-19T16:55:19.790,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 410. Nacking mccouch update. [views:debug,2014-08-19T16:55:19.790,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/410. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:19.790,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",410,replica,0} [ns_server:debug,2014-08-19T16:55:19.790,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,698,634,570,996,964,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754,722, 690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994,962, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012,752,720, 688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,738,706,674,642,610,578,546,514,972, 940,414,1004,744,712,680,648,616,584,552,520,1023,978,946,420,1010,718,654, 590,526,952,1016,756,692,628,564,990,730,666,602,538] [ns_server:debug,2014-08-19T16:55:19.899,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 124. 
Nacking mccouch update. [views:debug,2014-08-19T16:55:19.899,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/124. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:19.899,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",124,replica,0} [views:debug,2014-08-19T16:55:19.899,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/143. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:19.900,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,958,400,1022,762,698,634, 570,140,996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008, 748,684,620,556,126,982,424,360,722,658,594,530,164,956,398,1020,760,696,632, 568,138,994,372,766,734,702,670,638,606,574,542,144,968,410,378,346,1000,740, 708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714,682,650,618, 586,554,522,156,124,980,948,422,390,358,1012,752,720,688,656,624,592,560,528, 162,130,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960, 402,370,764,732,700,668,636,604,572,540,142,998,966,408,376,344,738,706,674, 642,610,578,546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552, 520,154,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756, 692,628,564,134,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938, 380,1002,742,678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754, 690,626,562,132,988,366,728,664,600,536,170,962,404] [ns_server:debug,2014-08-19T16:55:19.900,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",143,replica,0} [views:debug,2014-08-19T16:55:19.933,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/410. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:19.933,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",410,replica,0} [views:debug,2014-08-19T16:55:20.000,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/124. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:20.000,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",124,replica,0} [ns_server:debug,2014-08-19T16:55:20.075,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 408. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:20.076,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 141. Nacking mccouch update. [views:debug,2014-08-19T16:55:20.076,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/408. Updated state: replica (0) [views:debug,2014-08-19T16:55:20.076,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/141. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:20.076,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",408,replica,0} [ns_server:debug,2014-08-19T16:55:20.076,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",141,replica,0} [ns_server:debug,2014-08-19T16:55:20.076,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,698,634,570,996,964,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754,722, 690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994,962, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012,752,720, 688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992,960, 764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610,578,546,514, 972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946,420,1010,718, 654,590,526,952,1016,756,692,628,564,990,730,666,602,538] [ns_server:debug,2014-08-19T16:55:20.077,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,983, 672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956,709, 645,581,517,151,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981, 734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397,1019,942,759, 695,631,567,1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656,592, 409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745, 681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953,706,642,578, 395,1017,940,757,693,629,565,1004,991,744,680,616,552,369,978,731,667,603, 539,965,718,654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564, 381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589,525,159, 951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614,550,367, 976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754,690, 626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651,587, 523,157,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612, 548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573,143, 1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960, 713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,985,738, 674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699,635, 571,141,1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634, 996,685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633,995,684, 556,373,735,607,969,658,347] [views:debug,2014-08-19T16:55:20.134,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/408. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:20.135,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",408,replica,0} [views:debug,2014-08-19T16:55:20.135,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/141. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:20.135,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",141,replica,0} [ns_server:debug,2014-08-19T16:55:20.201,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 122. Nacking mccouch update. [views:debug,2014-08-19T16:55:20.201,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/122. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:20.202,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",122,replica,0} [ns_server:debug,2014-08-19T16:55:20.202,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,958,400,1022,762,698,634, 570,140,996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008, 748,684,620,556,126,982,424,360,722,658,594,530,164,956,398,1020,760,696,632, 568,138,994,372,766,734,702,670,638,606,574,542,144,968,410,378,346,1000,740, 708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714,682,650,618, 586,554,522,156,124,980,948,422,390,358,1012,752,720,688,656,624,592,560,528, 162,130,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960, 402,370,764,732,700,668,636,604,572,540,142,998,966,408,376,344,738,706,674, 642,610,578,546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552, 520,154,122,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016, 756,692,628,564,134,990,368,730,666,602,538,964,406,342,704,640,576,512,146, 938,380,1002,742,678,614,550,976,418,354,716,652,588,524,158,950,392,1014, 754,690,626,562,132,988,366,728,664,600,536,170,962,404] [views:debug,2014-08-19T16:55:20.252,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/122. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:20.252,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",122,replica,0} [ns_server:debug,2014-08-19T16:55:20.407,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 406. Nacking mccouch update. [views:debug,2014-08-19T16:55:20.407,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/406. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:20.407,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",406,replica,0} [ns_server:debug,2014-08-19T16:55:20.407,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,698,634,570,996,964,406, 736,704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550, 518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754, 722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994, 962,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548, 516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012,752, 720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992, 960,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610,578,546, 514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946,420,1010, 718,654,590,526,952,1016,756,692,628,564,990,730,666,602,538] [ns_server:debug,2014-08-19T16:55:20.424,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 139. Nacking mccouch update. [views:debug,2014-08-19T16:55:20.424,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/139. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:20.424,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",139,replica,0} [ns_server:debug,2014-08-19T16:55:20.425,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,983, 672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956,709, 645,581,517,151,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981, 734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397,1019,942,759, 695,631,567,1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656,592, 409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745, 681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953,706,642,578, 395,1017,940,757,693,629,565,1004,991,744,680,616,552,369,978,731,667,603, 539,965,718,654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564, 381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589,525,159, 951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614,550,367, 976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754,690, 626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651,587, 523,157,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676,612, 548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573,143, 1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960, 713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,985,738, 674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699,635, 571,141,1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634, 996,685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633,139,995, 684,556,373,735,607,969,658,347] [ns_server:debug,2014-08-19T16:55:20.549,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added 
_local/vbuuid document into vb: 120. Nacking mccouch update. [views:debug,2014-08-19T16:55:20.549,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/120. Updated state: replica (0) [views:debug,2014-08-19T16:55:20.549,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/406. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:20.549,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",120,replica,0} [ns_server:debug,2014-08-19T16:55:20.549,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",406,replica,0} [ns_server:debug,2014-08-19T16:55:20.550,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,958,400,1022,762,698,634, 570,140,996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008, 748,684,620,556,126,982,424,360,722,658,594,530,164,956,398,1020,760,696,632, 568,138,994,372,734,670,606,542,968,410,378,346,1000,740,708,676,644,612,580, 548,516,150,974,942,416,384,352,1006,746,714,682,650,618,586,554,522,156,124, 980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162,130,986,954,396, 364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402,370,764,732,700, 668,636,604,572,540,142,998,966,408,376,344,738,706,674,642,610,578,546,514, 148,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154,122,1023, 978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628,564, 134,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938,380,1002,742, 678,614,550,120,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562, 132,988,366,728,664,600,536,170,962,404,766,702,638,574,144] [views:debug,2014-08-19T16:55:20.616,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/139. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:20.616,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",139,replica,0} [views:debug,2014-08-19T16:55:20.753,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/120. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:20.754,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",120,replica,0} [ns_server:debug,2014-08-19T16:55:20.917,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 404. Nacking mccouch update. [views:debug,2014-08-19T16:55:20.917,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/404. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:20.918,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",404,replica,0} [ns_server:debug,2014-08-19T16:55:20.918,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 137. Nacking mccouch update. 
[views:debug,2014-08-19T16:55:20.918,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/137. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:20.918,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",137,replica,0} [ns_server:debug,2014-08-19T16:55:20.918,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,698,634,570,996,964,406, 736,704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550, 518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754, 722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994, 962,404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580, 548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534, 992,960,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610,578, 546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946,420, 1010,718,654,590,526,952,1016,756,692,628,564,990,730,666,602,538] [ns_server:debug,2014-08-19T16:55:20.919,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,983, 672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956,709, 645,581,517,151,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981, 734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397,1019,942,759, 695,631,567,137,1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656, 592,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005,992, 745,681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953,706,642, 578,395,1017,940,757,693,629,565,1004,991,744,680,616,552,369,978,731,667, 603,539,965,718,654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628, 564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589,525, 159,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614,550, 367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,157,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740,676, 612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573, 143,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351, 960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,985, 738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699, 635,571,141,1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762, 634,996,685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633,139, 995,684,556,373,735,607,969,658,347] [views:debug,2014-08-19T16:55:21.043,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/137. Updated state: replica (0) [views:debug,2014-08-19T16:55:21.043,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/404. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.043,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",137,replica,0} [ns_server:debug,2014-08-19T16:55:21.043,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",404,replica,0} [ns_server:debug,2014-08-19T16:55:21.168,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 118. Nacking mccouch update. [views:debug,2014-08-19T16:55:21.168,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/118. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.169,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",118,replica,0} [ns_server:debug,2014-08-19T16:55:21.169,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,958,400,1022,762,698,634, 570,140,996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008, 748,684,620,556,126,982,424,360,722,658,594,530,164,956,398,1020,760,696,632, 568,138,994,372,734,670,606,542,968,410,378,346,1000,740,708,676,644,612,580, 548,516,150,118,974,942,416,384,352,1006,746,714,682,650,618,586,554,522,156, 124,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162,130,986,954, 396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402,370,764,732, 700,668,636,604,572,540,142,998,966,408,376,344,738,706,674,642,610,578,546, 514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,134,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938,380,1002, 742,678,614,550,120,976,418,354,716,652,588,524,158,950,392,1014,754,690,626, 562,132,988,366,728,664,600,536,170,962,404,766,702,638,574,144] [views:debug,2014-08-19T16:55:21.270,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/118. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.270,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",118,replica,0} [ns_server:debug,2014-08-19T16:55:21.344,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 402. Nacking mccouch update. [views:debug,2014-08-19T16:55:21.345,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/402. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.345,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",402,replica,0} [ns_server:debug,2014-08-19T16:55:21.345,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,1022,762,698,634,570,996,964,406, 736,704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550, 518,976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754, 722,690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994, 962,404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580, 548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534, 992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,1010,718,654,590,526,952,1016,756,692,628,564,990,730,666,602,538] [ns_server:debug,2014-08-19T16:55:21.363,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 135. Nacking mccouch update. [views:debug,2014-08-19T16:55:21.363,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/135. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.363,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",135,replica,0} [ns_server:debug,2014-08-19T16:55:21.364,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,983, 672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956,709, 645,581,517,151,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981, 734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397,1019,942,759, 695,631,567,137,1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656, 592,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005,992, 745,681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953,706,642, 578,395,1017,940,757,693,629,565,135,1004,991,744,680,616,552,369,978,731, 667,603,539,965,718,654,590,407,343,952,705,641,577,513,147,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589, 525,159,951,704,640,576,393,1015,938,755,691,627,563,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014, 754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715, 651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,1000,987,740, 676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415, 351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559, 985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763, 699,635,571,141,1010,997,686,558,375,737,609,971,660,349,711,583,1022,945, 762,634,996,685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633, 139,995,684,556,373,735,607,969,658,347] 
[views:debug,2014-08-19T16:55:21.413,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/402. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.414,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",402,replica,0} [ns_server:debug,2014-08-19T16:55:21.430,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 116. Nacking mccouch update. [views:debug,2014-08-19T16:55:21.430,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/116. Updated state: replica (0) [views:debug,2014-08-19T16:55:21.430,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/135. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.431,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",116,replica,0} [ns_server:debug,2014-08-19T16:55:21.431,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",135,replica,0} [ns_server:debug,2014-08-19T16:55:21.431,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,958,400,1022,762,698,634, 570,140,996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008, 748,684,620,556,126,982,424,360,722,658,594,530,164,956,398,1020,760,696,632, 568,138,994,372,734,670,606,542,968,410,378,346,1000,740,708,676,644,612,580, 548,516,150,118,974,942,416,384,352,1006,746,714,682,650,618,586,554,522,156, 124,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162,130,986,954, 396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402,370,764,732, 700,668,636,604,572,540,142,998,966,408,376,344,738,706,674,642,610,578,546, 514,148,116,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,134,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938,380,1002, 742,678,614,550,120,976,418,354,716,652,588,524,158,950,392,1014,754,690,626, 562,132,988,366,728,664,600,536,170,962,404,766,702,638,574,144] [views:debug,2014-08-19T16:55:21.500,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/116. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.500,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",116,replica,0} [ns_server:debug,2014-08-19T16:55:21.565,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 400. Nacking mccouch update. [views:debug,2014-08-19T16:55:21.565,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/400. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.565,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",400,replica,0} [ns_server:debug,2014-08-19T16:55:21.565,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754,722, 690,658,626,594,562,530,988,956,1020,760,728,696,664,632,600,568,536,994,962, 404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548, 516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012,752, 720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534,992, 960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610,578, 546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946,420, 1010,718,654,590,526,952,1016,756,692,628,564,990,730,666,602,538,964,406] [ns_server:debug,2014-08-19T16:55:21.582,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 133. Nacking mccouch update. [views:debug,2014-08-19T16:55:21.582,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/133. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.582,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",133,replica,0} [ns_server:debug,2014-08-19T16:55:21.583,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,983, 672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956,709, 645,581,517,151,1020,943,760,696,632,568,385,1007,994,747,683,619,555,981, 734,670,606,423,359,968,721,657,593,529,163,955,708,644,580,397,1019,942,759, 695,631,567,137,1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656, 592,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005,992, 745,681,617,553,979,732,668,604,421,357,966,719,655,591,527,161,953,706,642, 578,395,1017,940,757,693,629,565,135,1004,991,744,680,616,552,369,978,731, 667,603,539,965,718,654,590,407,343,952,705,641,577,513,147,1016,939,756,692, 628,564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589, 525,159,951,704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678, 614,550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145, 1014,754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353, 962,715,651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,1000, 987,740,676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765, 701,637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662, 598,415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023, 946,763,699,635,571,141,1010,997,686,558,375,737,609,971,660,349,711,583, 1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399,1021, 944,761,633,139,995,684,556,373,735,607,969,658,347] 
[views:debug,2014-08-19T16:55:21.632,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/400. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.632,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",400,replica,0} [views:debug,2014-08-19T16:55:21.649,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/133. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.649,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",133,replica,0} [ns_server:debug,2014-08-19T16:55:21.685,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 114. Nacking mccouch update. [views:debug,2014-08-19T16:55:21.686,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/114. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.686,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",114,replica,0} [ns_server:debug,2014-08-19T16:55:21.686,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,958,400,1022,762,698,634, 570,140,996,374,736,672,608,544,114,970,412,348,710,646,582,518,152,944,386, 1008,748,684,620,556,126,982,424,360,722,658,594,530,164,956,398,1020,760, 696,632,568,138,994,372,734,670,606,542,968,410,378,346,1000,740,708,676,644, 612,580,548,516,150,118,974,942,416,384,352,1006,746,714,682,650,618,586,554, 522,156,124,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162,130, 986,954,396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402,370, 764,732,700,668,636,604,572,540,142,998,966,408,376,344,738,706,674,642,610, 578,546,514,148,116,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 154,122,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756, 692,628,564,134,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938, 380,1002,742,678,614,550,120,976,418,354,716,652,588,524,158,950,392,1014, 754,690,626,562,132,988,366,728,664,600,536,170,962,404,766,702,638,574,144] [views:debug,2014-08-19T16:55:21.828,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/114. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.829,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",114,replica,0} [ns_server:debug,2014-08-19T16:55:21.987,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 398. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:21.987,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 131. Nacking mccouch update. [views:debug,2014-08-19T16:55:21.987,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/398. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.987,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",398,replica,0} [views:debug,2014-08-19T16:55:21.987,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/131. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:21.987,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",131,replica,0} [ns_server:debug,2014-08-19T16:55:21.987,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754,722, 690,658,626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994, 962,404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580, 548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,1018,758,726,694,662,630,598,566,534, 992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,1010,718,654,590,526,952,1016,756,692,628,564,990,730,666,602,538,964, 406] [ns_server:debug,2014-08-19T16:55:21.988,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,983, 672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956,645, 517,151,943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670,606,423, 359,968,721,657,593,529,163,955,708,644,580,397,1019,942,759,695,631,567,137, 1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409,345,954, 707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553, 979,732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017,940, 757,693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,965,718, 654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,977,730,666,602,419,355,964,717,653,589,525,159,951,704, 640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367,976, 729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626, 562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651,587,523, 157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676,612, 548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573,143, 1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960, 713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,985,738, 674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699,635, 571,141,1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762,634, 996,685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633,139,995, 684,556,373,735,607,969,658,347,709,581,1020] [views:debug,2014-08-19T16:55:22.079,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/398. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.079,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",398,replica,0} [views:debug,2014-08-19T16:55:22.079,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/131. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.080,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",131,replica,0} [ns_server:debug,2014-08-19T16:55:22.263,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 112. Nacking mccouch update. [views:debug,2014-08-19T16:55:22.263,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/112. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.263,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",112,replica,0} [ns_server:debug,2014-08-19T16:55:22.264,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,958,400,1022,762,698,634, 570,140,996,374,736,672,608,544,114,970,412,348,710,646,582,518,152,944,386, 1008,748,684,620,556,126,982,424,360,722,658,594,530,164,956,398,1020,760, 696,632,568,138,994,372,734,670,606,542,112,968,410,378,346,1000,740,708,676, 644,612,580,548,516,150,118,974,942,416,384,352,1006,746,714,682,650,618,586, 554,522,156,124,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162, 130,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402, 370,764,732,700,668,636,604,572,540,142,998,966,408,376,344,738,706,674,642, 610,578,546,514,148,116,972,940,414,382,350,1004,744,712,680,648,616,584,552, 520,154,122,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016, 756,692,628,564,134,990,368,730,666,602,538,964,406,342,704,640,576,512,146, 938,380,1002,742,678,614,550,120,976,418,354,716,652,588,524,158,950,392, 1014,754,690,626,562,132,988,366,728,664,600,536,170,962,404,766,702,638,574, 144] [views:debug,2014-08-19T16:55:22.330,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/112. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.330,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",112,replica,0} [ns_server:debug,2014-08-19T16:55:22.461,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 129. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:22.461,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 396. Nacking mccouch update. [views:debug,2014-08-19T16:55:22.461,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/129. Updated state: replica (0) [views:debug,2014-08-19T16:55:22.461,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/396. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.461,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",129,replica,0} [ns_server:debug,2014-08-19T16:55:22.461,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",396,replica,0} [ns_server:debug,2014-08-19T16:55:22.462,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754,722, 690,658,626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994, 962,404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580, 548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566, 534,992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642, 610,578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978, 946,420,1010,718,654,590,526,952,1016,756,692,628,564,990,730,666,602,538, 964,406] [ns_server:debug,2014-08-19T16:55:22.462,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,983, 672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956,645, 517,151,943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670,606,423, 359,968,721,657,593,529,163,955,708,644,580,397,1019,942,759,695,631,567,137, 1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409,345,954, 707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553, 979,732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017,940, 757,693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,965,718, 654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,977,730,666,602,419,355,964,717,653,589,525,159,951,704, 640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367,976, 729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626, 562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651,587,523, 157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676,612, 548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573,143, 1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351,960, 713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,129,985, 738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763,699, 635,571,141,1010,997,686,558,375,737,609,971,660,349,711,583,1022,945,762, 634,996,685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633,139, 995,684,556,373,735,607,969,658,347,709,581,1020] [views:debug,2014-08-19T16:55:22.541,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/396. Updated state: replica (0) [views:debug,2014-08-19T16:55:22.541,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/129. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.541,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",396,replica,0} [ns_server:debug,2014-08-19T16:55:22.542,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",129,replica,0} [ns_server:debug,2014-08-19T16:55:22.591,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 110. Nacking mccouch update. [views:debug,2014-08-19T16:55:22.591,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/110. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.591,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",110,replica,0} [ns_server:debug,2014-08-19T16:55:22.592,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,958,400,1022,762,698,634, 570,140,996,374,736,672,608,544,114,970,412,348,710,646,582,518,152,944,386, 1008,748,684,620,556,126,982,424,360,722,658,594,530,164,956,398,1020,760, 696,632,568,138,994,372,734,670,606,542,112,968,410,346,740,708,676,644,612, 580,548,516,150,118,974,942,416,384,352,1006,746,714,682,650,618,586,554,522, 156,124,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162,130,986, 954,396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402,370,764, 732,700,668,636,604,572,540,142,110,998,966,408,376,344,738,706,674,642,610, 578,546,514,148,116,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 154,122,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756, 692,628,564,134,990,368,730,666,602,538,964,406,342,704,640,576,512,146,938, 380,1002,742,678,614,550,120,976,418,354,716,652,588,524,158,950,392,1014, 754,690,626,562,132,988,366,728,664,600,536,170,962,404,766,702,638,574,144, 378,1000] [views:debug,2014-08-19T16:55:22.642,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/110. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.642,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",110,replica,0} [ns_server:debug,2014-08-19T16:55:22.709,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 394. Nacking mccouch update. [views:debug,2014-08-19T16:55:22.709,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/394. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.709,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",394,replica,0} [ns_server:debug,2014-08-19T16:55:22.709,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,1014,754,722, 690,658,626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994, 962,404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580, 548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,1012, 752,720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566, 534,992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642, 610,578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978, 946,420,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602, 538,964,406] [ns_server:debug,2014-08-19T16:55:22.726,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 127. Nacking mccouch update. [views:debug,2014-08-19T16:55:22.726,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/127. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.726,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",127,replica,0} [ns_server:debug,2014-08-19T16:55:22.727,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956, 645,517,151,943,760,696,632,568,385,1007,994,747,683,619,555,981,734,670,606, 423,359,968,721,657,593,529,163,955,708,644,580,397,1019,942,759,695,631,567, 137,1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409,345, 954,707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745,681,617, 553,979,732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017, 940,757,693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,965, 718,654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589,525,159,951, 704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367, 976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754,690, 626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651,587, 523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676, 612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573, 143,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415,351, 960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,129, 985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946,763, 699,635,571,141,1010,997,686,558,375,737,609,971,660,349,711,583,1022,945, 762,634,996,685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761,633, 139,995,684,556,373,735,607,969,658,347,709,581,1020] 
[views:debug,2014-08-19T16:55:22.776,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/394. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.776,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",394,replica,0} [ns_server:debug,2014-08-19T16:55:22.793,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 108. Nacking mccouch update. [views:debug,2014-08-19T16:55:22.793,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/108. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.793,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",108,replica,0} [views:debug,2014-08-19T16:55:22.793,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/127. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.793,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",127,replica,0} [ns_server:debug,2014-08-19T16:55:22.793,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,958,400,1022,762,698,634, 570,140,996,374,736,672,608,544,114,970,412,348,710,646,582,518,152,944,386, 1008,748,684,620,556,126,982,424,360,722,658,594,530,164,956,398,1020,760, 696,632,568,138,994,372,734,670,606,542,112,968,410,346,740,708,676,644,612, 580,548,516,150,118,974,942,416,384,352,1006,746,714,682,650,618,586,554,522, 156,124,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162,130,986, 954,396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402,370,764, 732,700,668,636,604,572,540,142,110,998,966,408,376,344,738,706,674,642,610, 578,546,514,148,116,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 154,122,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756, 692,628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 938,380,1002,742,678,614,550,120,976,418,354,716,652,588,524,158,950,392, 1014,754,690,626,562,132,988,366,728,664,600,536,170,962,404,766,702,638,574, 144,378,1000] [views:debug,2014-08-19T16:55:22.922,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/108. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:22.922,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",108,replica,0} [ns_server:debug,2014-08-19T16:55:23.057,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 125. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:23.057,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 392. Nacking mccouch update. [views:debug,2014-08-19T16:55:23.057,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/125. 
Updated state: replica (0) [views:debug,2014-08-19T16:55:23.057,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/392. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:23.057,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",125,replica,0} [ns_server:debug,2014-08-19T16:55:23.057,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",392,replica,0} [ns_server:debug,2014-08-19T16:55:23.058,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 704,672,640,608,576,544,512,970,938,412,1002,742,710,678,646,614,582,550,518, 976,944,418,1008,748,716,684,652,620,588,556,524,982,950,424,392,1014,754, 722,690,658,626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536, 994,962,404,766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612, 580,548,516,974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422, 1012,752,720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630, 598,566,534,992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706, 674,642,610,578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730, 666,602,538,964,406] [ns_server:debug,2014-08-19T16:55:23.058,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956, 645,517,151,943,760,696,632,568,385,1007,994,747,683,619,555,125,981,734,670, 606,423,359,968,721,657,593,529,163,955,708,644,580,397,1019,942,759,695,631, 567,137,1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409, 345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745,681, 617,553,979,732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395, 1017,940,757,693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603, 539,965,718,654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564, 381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589,525,159, 951,704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550, 367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740, 676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415, 351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559, 129,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946, 763,699,635,571,141,1010,997,686,558,375,737,609,971,660,349,711,583,1022, 945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761, 633,139,995,684,556,373,735,607,969,658,347,709,581,1020] [ns_server:debug,2014-08-19T16:55:23.199,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 106. Nacking mccouch update. 
[views:debug,2014-08-19T16:55:23.199,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/106. Updated state: replica (0) [views:debug,2014-08-19T16:55:23.199,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/125. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:23.200,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",106,replica,0} [ns_server:debug,2014-08-19T16:55:23.200,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",125,replica,0} [ns_server:debug,2014-08-19T16:55:23.200,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,958,400,1022,762,698,634, 570,140,996,374,736,672,608,544,114,970,412,348,710,646,582,518,152,944,386, 1008,748,684,620,556,126,982,424,360,722,658,594,530,164,956,398,1020,760, 696,632,568,138,994,372,734,670,606,542,112,968,410,346,740,708,676,644,612, 580,548,516,150,118,974,942,416,384,352,1006,746,714,682,650,618,586,554,522, 156,124,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162,130,986, 954,396,364,1018,758,726,694,662,630,598,566,534,168,136,992,960,402,370,764, 732,700,668,636,604,572,540,142,110,998,966,408,376,344,738,706,674,642,610, 578,546,514,148,116,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 154,122,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756, 692,628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 938,380,1002,742,678,614,550,120,976,418,354,716,652,588,524,158,950,392, 1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702,638, 574,144,378,1000] [views:debug,2014-08-19T16:55:23.216,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/392. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:23.217,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",392,replica,0} [views:debug,2014-08-19T16:55:23.388,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/106. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:23.388,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",106,replica,0} [ns_server:debug,2014-08-19T16:55:23.634,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 390. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:23.634,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 123. Nacking mccouch update. [views:debug,2014-08-19T16:55:23.634,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/390. Updated state: replica (0) [views:debug,2014-08-19T16:55:23.634,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/123. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:23.635,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",390,replica,0} [ns_server:debug,2014-08-19T16:55:23.635,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",123,replica,0} [ns_server:debug,2014-08-19T16:55:23.635,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,938,412,1002,742,710,678,646,614,582,550,518,976,944,418, 1008,748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658, 626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752, 720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534, 992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602,538, 964,406,704,640,576,512] [ns_server:debug,2014-08-19T16:55:23.636,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956, 645,517,151,943,760,696,632,568,385,1007,994,747,683,619,555,125,981,734,670, 606,423,359,968,721,657,593,529,163,955,708,644,580,397,1019,942,759,695,631, 567,137,1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409, 345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745,681, 617,553,123,979,732,668,604,421,357,966,719,655,591,527,161,953,706,642,578, 395,1017,940,757,693,629,565,135,1004,991,744,680,616,552,369,978,731,667, 603,539,965,718,654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628, 564,381,1003,990,743,679,615,551,977,730,666,602,419,355,964,717,653,589,525, 159,951,704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614, 550,367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014, 754,690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715, 651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987, 740,676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701, 637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598, 415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623, 559,129,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023, 946,763,699,635,571,141,1010,997,686,558,375,737,609,971,660,349,711,583, 1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399,1021, 944,761,633,139,995,684,556,373,735,607,969,658,347,709,581,1020] [ns_server:debug,2014-08-19T16:55:23.735,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 104. Nacking mccouch update. [views:debug,2014-08-19T16:55:23.735,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/104. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:23.735,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",104,replica,0} [views:debug,2014-08-19T16:55:23.735,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/123. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:23.735,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",123,replica,0} [views:debug,2014-08-19T16:55:23.735,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/390. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:23.735,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",390,replica,0} [ns_server:debug,2014-08-19T16:55:23.735,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,958,400,1022,762,698,634, 570,140,996,374,736,672,608,544,114,970,412,348,710,646,582,518,152,944,386, 1008,748,684,620,556,126,982,424,360,722,658,594,530,164,956,398,1020,760, 696,632,568,138,994,372,734,670,606,542,112,968,410,346,740,708,676,644,612, 580,548,516,150,118,974,942,416,384,352,1006,746,714,682,650,618,586,554,522, 156,124,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162,130,986, 954,396,364,1018,758,726,694,662,630,598,566,534,168,136,104,992,960,402,370, 764,732,700,668,636,604,572,540,142,110,998,966,408,376,344,738,706,674,642, 610,578,546,514,148,116,972,940,414,382,350,1004,744,712,680,648,616,584,552, 520,154,122,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016, 756,692,628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512, 146,938,380,1002,742,678,614,550,120,976,418,354,716,652,588,524,158,950,392, 1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702,638, 574,144,378,1000] [views:debug,2014-08-19T16:55:23.848,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/104. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:23.848,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",104,replica,0} [ns_server:debug,2014-08-19T16:55:23.994,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 388. Nacking mccouch update. [views:debug,2014-08-19T16:55:23.994,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/388. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:23.994,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",388,replica,0} [ns_server:debug,2014-08-19T16:55:23.995,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,938,412,1002,742,710,678,646,614,582,550,518,976,944,418, 1008,748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658, 626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752, 720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534, 992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602, 538,964,406,704,640,576,512] [ns_server:debug,2014-08-19T16:55:24.011,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 121. Nacking mccouch update. [views:debug,2014-08-19T16:55:24.011,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/121. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:24.011,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",121,replica,0} [ns_server:debug,2014-08-19T16:55:24.012,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956, 645,517,151,696,568,385,1007,994,747,683,619,555,125,981,734,670,606,423,359, 968,721,657,593,529,163,955,708,644,580,397,1019,942,759,695,631,567,137, 1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409,345,954, 707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553, 123,979,732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017, 940,757,693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,965, 718,654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,121,977,730,666,602,419,355,964,717,653,589,525,159, 951,704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550, 367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754, 690,626,562,379,1001,988,741,677,613,549,975,728,664,600,417,353,962,715,651, 587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740, 676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598,415, 351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559, 129,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946, 763,699,635,571,141,1010,997,686,558,375,737,609,971,660,349,711,583,1022, 945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399,1021,944,761, 633,139,995,684,556,373,735,607,969,658,347,709,581,1020,943,760,632] 
[ns_server:debug,2014-08-19T16:55:24.113,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 102. Nacking mccouch update. [views:debug,2014-08-19T16:55:24.113,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/102. Updated state: replica (0) [views:debug,2014-08-19T16:55:24.114,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/388. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:24.114,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",102,replica,0} [ns_server:debug,2014-08-19T16:55:24.114,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",388,replica,0} [ns_server:debug,2014-08-19T16:55:24.114,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,710,646,582,518,152,944, 386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,956,398,1020, 760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,740,708,676,644, 612,580,548,516,150,118,974,942,416,384,352,1006,746,714,682,650,618,586,554, 522,156,124,980,948,422,390,358,1012,752,720,688,656,624,592,560,528,162,130, 986,954,396,364,1018,758,726,694,662,630,598,566,534,168,136,104,992,960,402, 370,764,732,700,668,636,604,572,540,142,110,998,966,408,376,344,738,706,674, 642,610,578,546,514,148,116,972,940,414,382,350,1004,744,712,680,648,616,584, 552,520,154,122,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394, 1016,756,692,628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576, 512,146,938,380,1002,742,678,614,550,120,976,418,354,716,652,588,524,158,950, 392,1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702, 638,574,144,378,1000] [views:debug,2014-08-19T16:55:24.154,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/121. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:24.154,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",121,replica,0} [views:debug,2014-08-19T16:55:24.275,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/102. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:24.276,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",102,replica,0} [ns_server:debug,2014-08-19T16:55:24.431,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 386. Nacking mccouch update. [views:debug,2014-08-19T16:55:24.431,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/386. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:24.431,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",386,replica,0} [ns_server:debug,2014-08-19T16:55:24.432,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,938,412,1002,742,710,678,646,614,582,550,518,976,944,418,386, 1008,748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658, 626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752, 720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534, 992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610, 578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602, 538,964,406,704,640,576,512] [ns_server:debug,2014-08-19T16:55:24.464,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 119. Nacking mccouch update. [views:debug,2014-08-19T16:55:24.465,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/119. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:24.465,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",119,replica,0} [ns_server:debug,2014-08-19T16:55:24.466,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956, 645,517,151,696,568,385,1007,994,747,683,619,555,125,981,734,670,606,423,359, 968,721,657,593,529,163,955,708,644,580,397,1019,942,759,695,631,567,137, 1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409,345,954, 707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553, 123,979,732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017, 940,757,693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,965, 718,654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,121,977,730,666,602,419,355,964,717,653,589,525,159, 951,704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550, 367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754, 690,626,562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,715, 651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987, 740,676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701, 637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,973,726,662,598, 415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623, 559,129,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023, 946,763,699,635,571,141,1010,997,686,558,375,737,609,971,660,349,711,583, 1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399,1021, 944,761,633,139,995,684,556,373,735,607,969,658,347,709,581,1020,943,760,632] 
[views:debug,2014-08-19T16:55:24.565,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/386. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:24.565,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",386,replica,0} [ns_server:debug,2014-08-19T16:55:24.615,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 100. Nacking mccouch update. [views:debug,2014-08-19T16:55:24.615,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/100. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:24.615,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",100,replica,0} [views:debug,2014-08-19T16:55:24.615,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/119. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:24.616,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",119,replica,0} [ns_server:debug,2014-08-19T16:55:24.616,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,710,646,582,518,152,944, 386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100,956,398, 1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,708,644,580, 516,150,974,942,416,384,352,1006,746,714,682,650,618,586,554,522,156,124,980, 948,422,390,358,1012,752,720,688,656,624,592,560,528,162,130,986,954,396,364, 1018,758,726,694,662,630,598,566,534,168,136,104,992,960,402,370,764,732,700, 668,636,604,572,540,142,110,998,966,408,376,344,738,706,674,642,610,578,546, 514,148,116,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146,938,380, 1002,742,678,614,550,120,976,418,354,716,652,588,524,158,950,392,1014,754, 690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702,638,574,144, 378,1000,740,676,612,548,118] [views:debug,2014-08-19T16:55:24.755,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/100. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:24.755,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",100,replica,0} [ns_server:debug,2014-08-19T16:55:24.887,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 384. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:24.887,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 117. Nacking mccouch update. [views:debug,2014-08-19T16:55:24.887,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/384. 
Updated state: replica (0) [views:debug,2014-08-19T16:55:24.887,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/117. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:24.887,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",384,replica,0} [ns_server:debug,2014-08-19T16:55:24.887,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",117,replica,0} [ns_server:debug,2014-08-19T16:55:24.887,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,938,412,1002,742,710,678,646,614,582,550,518,976,944,418,386, 1008,748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658, 626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012, 752,720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566, 534,992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642, 610,578,546,514,972,940,414,1004,744,712,680,648,616,584,552,520,1023,978, 946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666, 602,538,964,406,704,640,576,512] [ns_server:debug,2014-08-19T16:55:24.888,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956, 645,517,151,696,568,385,1007,994,747,683,619,555,125,981,734,670,606,423,359, 968,721,657,593,529,163,955,708,644,580,397,1019,942,759,695,631,567,137, 1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409,345,954, 707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553, 123,979,732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017, 940,757,693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,965, 718,654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,121,977,730,666,602,419,355,964,717,653,589,525,159, 951,704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550, 367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754, 690,626,562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,715, 651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987, 740,676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701, 637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662, 598,415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,129,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401, 1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,971,660,349,711, 583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399, 1021,944,761,633,139,995,684,556,373,735,607,969,658,347,709,581,1020,943, 760,632] [views:debug,2014-08-19T16:55:24.962,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/117. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:24.962,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",117,replica,0} [views:debug,2014-08-19T16:55:24.962,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/384. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:24.963,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",384,replica,0} [ns_server:debug,2014-08-19T16:55:25.071,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 98. Nacking mccouch update. [views:debug,2014-08-19T16:55:25.071,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/98. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:25.071,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",98,replica,0} [ns_server:debug,2014-08-19T16:55:25.072,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,710,646,582,518,152,944, 386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100,956,398, 1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,708,644,580, 516,150,974,942,416,384,352,1006,746,714,682,650,618,586,554,522,156,124,980, 948,422,390,358,1012,98,752,720,688,656,624,592,560,528,162,130,986,954,396, 364,1018,758,726,694,662,630,598,566,534,168,136,104,992,960,402,370,764,732, 700,668,636,604,572,540,142,110,998,966,408,376,344,738,706,674,642,610,578, 546,514,148,116,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154, 122,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692, 628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146,938, 380,1002,742,678,614,550,120,976,418,354,716,652,588,524,158,950,392,1014, 754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702,638,574, 144,378,1000,740,676,612,548,118] [views:debug,2014-08-19T16:55:25.139,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/98. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:25.139,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",98,replica,0} [ns_server:debug,2014-08-19T16:55:25.264,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 382. Nacking mccouch update. [views:debug,2014-08-19T16:55:25.264,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/382. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:25.264,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",382,replica,0} [ns_server:debug,2014-08-19T16:55:25.264,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,938,412,1002,742,710,678,646,614,582,550,518,976,944,418,386, 1008,748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658, 626,594,562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404, 766,734,702,670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516, 974,942,416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012, 752,720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566, 534,992,960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642, 610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730, 666,602,538,964,406,704,640,576,512] [ns_server:debug,2014-08-19T16:55:25.429,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 115. Nacking mccouch update. [views:debug,2014-08-19T16:55:25.429,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/115. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:25.429,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",115,replica,0} [ns_server:debug,2014-08-19T16:55:25.430,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956, 645,517,151,696,568,385,1007,994,747,683,619,555,125,981,734,670,606,423,359, 968,721,657,593,529,163,955,708,644,580,397,1019,942,759,695,631,567,137, 1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409,345,954, 707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553, 123,979,732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017, 940,757,693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,965, 718,654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,121,977,730,666,602,419,355,964,717,653,589,525,159, 951,704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550, 367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754, 690,626,562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,715, 651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987, 740,676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701, 637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662, 598,415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,129,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401, 1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660,349, 711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399, 1021,944,761,633,139,995,684,556,373,735,607,969,658,347,709,581,1020,943, 760,632] 
[views:debug,2014-08-19T16:55:25.533,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/382. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:25.533,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",382,replica,0} [views:debug,2014-08-19T16:55:25.549,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/115. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:25.549,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",115,replica,0} [ns_server:debug,2014-08-19T16:55:25.582,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 96. Nacking mccouch update. [views:debug,2014-08-19T16:55:25.583,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/96. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:25.583,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",96,replica,0} [ns_server:debug,2014-08-19T16:55:25.583,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,710,646,582,518,152,944, 386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100,956,398, 1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,708,644,580, 516,150,974,942,416,384,352,1006,746,714,682,650,618,586,554,522,156,124,980, 948,422,390,358,1012,98,752,720,688,656,624,592,560,528,162,130,986,954,396, 364,1018,758,726,694,662,630,598,566,534,168,136,104,992,960,402,370,764,732, 700,668,636,604,572,540,142,110,998,966,408,376,344,738,706,674,642,610,578, 546,514,148,116,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154, 122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756, 692,628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 938,380,1002,742,678,614,550,120,976,418,354,716,652,588,524,158,950,392, 1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702,638, 574,144,378,1000,740,676,612,548,118] [views:debug,2014-08-19T16:55:25.670,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/96. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:25.670,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",96,replica,0} [ns_server:debug,2014-08-19T16:55:25.767,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 113. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:25.767,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 380. Nacking mccouch update. [views:debug,2014-08-19T16:55:25.767,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/113. 
Updated state: replica (0) [views:debug,2014-08-19T16:55:25.767,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/380. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:25.767,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",113,replica,0} [ns_server:debug,2014-08-19T16:55:25.767,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",380,replica,0} [ns_server:debug,2014-08-19T16:55:25.768,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,412,742,710,678,646,614,582,550,518,976,944,418,386,1008,748, 716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562, 530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404,766,734,702, 670,638,606,574,542,968,410,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720,688, 656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534,992,960, 402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610,578,546, 514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023,978,946,420, 388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602,538, 964,406,704,640,576,512,938,380,1002] [ns_server:debug,2014-08-19T16:55:25.768,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956, 645,517,151,696,568,385,1007,994,747,683,619,555,125,981,734,670,606,423,359, 968,721,657,593,529,163,955,708,644,580,397,1019,942,759,695,631,567,137, 1006,993,746,682,618,554,371,980,733,669,605,541,967,720,656,592,409,345,954, 707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553, 123,979,732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017, 940,757,693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,965, 718,654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381, 1003,990,743,679,615,551,121,977,730,666,602,419,355,964,717,653,589,525,159, 951,704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550, 367,976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754, 690,626,562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,715, 651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987, 740,676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701, 637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662, 598,415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,129,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401, 1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660,349, 711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399, 1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,709,581,1020, 943,760,632] [ns_server:debug,2014-08-19T16:55:25.859,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 94. Nacking mccouch update. 
[views:debug,2014-08-19T16:55:25.859,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/94. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:25.859,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",94,replica,0} [views:debug,2014-08-19T16:55:25.860,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/113. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:25.860,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",113,replica,0} [ns_server:debug,2014-08-19T16:55:25.860,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,710,646,582,518,152,944, 386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100,956,398, 1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,708,644,580, 516,150,974,942,416,384,352,1006,746,714,682,650,618,586,554,522,156,124,980, 948,422,390,358,1012,98,752,720,688,656,624,592,560,528,162,130,986,954,396, 364,1018,758,726,694,662,630,598,566,534,168,136,104,992,960,402,370,764,732, 700,668,636,604,572,540,142,110,998,966,408,376,344,738,706,674,642,610,578, 546,514,148,116,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154, 122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756, 692,628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950,392, 1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702,638, 574,144,378,1000,740,676,612,548,118] [views:debug,2014-08-19T16:55:25.909,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/380. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:25.910,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",380,replica,0} [views:debug,2014-08-19T16:55:25.994,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/94. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:25.994,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",94,replica,0} [ns_server:debug,2014-08-19T16:55:26.097,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 378. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:26.097,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 111. Nacking mccouch update. [views:debug,2014-08-19T16:55:26.098,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/378. Updated state: replica (0) [views:debug,2014-08-19T16:55:26.098,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/111. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:26.098,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",378,replica,0} [ns_server:debug,2014-08-19T16:55:26.098,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",111,replica,0} [ns_server:debug,2014-08-19T16:55:26.098,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,412,742,710,678,646,614,582,550,518,976,944,418,386,1008,748, 716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562, 530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404,766,734,702, 670,638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942, 416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720, 688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534,992, 960,402,764,732,700,668,636,604,572,540,998,966,408,738,706,674,642,610,578, 546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602, 538,964,406,704,640,576,512,938,380,1002] [ns_server:debug,2014-08-19T16:55:26.099,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956, 645,517,151,696,568,385,1007,747,619,125,981,734,670,606,423,359,968,721,657, 593,529,163,955,708,644,580,397,1019,942,759,695,631,567,137,1006,993,746, 682,618,554,371,980,733,669,605,541,111,967,720,656,592,409,345,954,707,643, 579,515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553,123,979, 732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017,940,757, 693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,965,718,654, 590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381,1003,990, 743,679,615,551,121,977,730,666,602,419,355,964,717,653,589,525,159,951,704, 640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367,976, 729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626, 562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,715,651,587, 523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676, 612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637,573, 143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598,415, 351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559, 129,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023,946, 763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660,349,711,583, 1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399,1021, 944,761,633,139,995,684,556,373,735,607,113,969,658,347,709,581,1020,943,760, 632,994,683,555] [ns_server:debug,2014-08-19T16:55:26.207,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 92. Nacking mccouch update. [views:debug,2014-08-19T16:55:26.207,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/92. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:26.208,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",92,replica,0} [views:debug,2014-08-19T16:55:26.208,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/378. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:26.208,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",378,replica,0} [ns_server:debug,2014-08-19T16:55:26.208,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,710,646,582,518,152,944, 386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100,956,398, 1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,708,644,580, 516,150,974,942,416,384,352,1006,92,746,714,682,650,618,586,554,522,156,124, 980,948,422,390,358,1012,98,752,720,688,656,624,592,560,528,162,130,986,954, 396,364,1018,758,726,694,662,630,598,566,534,168,136,104,992,960,402,370,764, 732,700,668,636,604,572,540,142,110,998,966,408,376,344,738,706,674,642,610, 578,546,514,148,116,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 154,122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016, 756,692,628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512, 146,938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950, 392,1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702, 638,574,144,378,1000,740,676,612,548,118] [views:debug,2014-08-19T16:55:26.225,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/111. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:26.225,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",111,replica,0} [views:debug,2014-08-19T16:55:26.325,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/92. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:26.325,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",92,replica,0} [ns_server:debug,2014-08-19T16:55:26.576,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 109. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:26.576,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 376. Nacking mccouch update. [views:debug,2014-08-19T16:55:26.576,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/109. Updated state: replica (0) [views:debug,2014-08-19T16:55:26.576,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/376. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:26.576,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",109,replica,0} [ns_server:debug,2014-08-19T16:55:26.576,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",376,replica,0} [ns_server:debug,2014-08-19T16:55:26.577,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,736, 672,608,544,970,412,742,710,678,646,614,582,550,518,976,944,418,386,1008,748, 716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562, 530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404,766,734,702, 670,638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942, 416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720, 688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534,992, 960,402,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642,610, 578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023,978, 946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666, 602,538,964,406,704,640,576,512,938,380,1002] [ns_server:debug,2014-08-19T16:55:26.578,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956, 645,517,151,696,568,385,1007,747,619,125,981,734,670,606,423,359,968,721,657, 593,529,163,955,708,644,580,397,1019,942,759,695,631,567,137,1006,993,746, 682,618,554,371,980,733,669,605,541,111,967,720,656,592,409,345,954,707,643, 579,515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553,123,979, 732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017,940,757, 693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718, 654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,121,977,730,666,602,419,355,964,717,653,589,525,159,951, 704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367, 976,729,665,601,537,963,716,652,588,405,950,767,703,639,575,145,1014,754,690, 626,562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,715,651, 587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740, 676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598, 415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623, 559,129,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401,1023, 946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660,349,711,583, 1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399,1021, 944,761,633,139,995,684,556,373,735,607,113,969,658,347,709,581,1020,943,760, 632,994,683,555] [views:debug,2014-08-19T16:55:26.643,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/376. Updated state: replica (0) [views:debug,2014-08-19T16:55:26.643,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/109. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:26.644,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",376,replica,0} [ns_server:debug,2014-08-19T16:55:26.644,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",109,replica,0} [ns_server:debug,2014-08-19T16:55:26.685,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 90. Nacking mccouch update. [views:debug,2014-08-19T16:55:26.685,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/90. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:26.685,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",90,replica,0} [ns_server:debug,2014-08-19T16:55:26.685,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,710,646,582,518,152,944, 386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100,956,398, 1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,708,644,580, 516,150,942,384,1006,92,746,714,682,650,618,586,554,522,156,124,980,948,422, 390,358,1012,98,752,720,688,656,624,592,560,528,162,130,986,954,396,364,1018, 758,726,694,662,630,598,566,534,168,136,104,992,960,402,370,764,732,700,668, 636,604,572,540,142,110,998,966,408,376,344,738,706,674,642,610,578,546,514, 148,116,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692, 628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146,938, 380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950,392,1014, 754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702,638,574, 144,378,1000,740,676,612,548,118,974,416,352] [views:debug,2014-08-19T16:55:26.789,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/90. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:26.789,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",90,replica,0} [ns_server:debug,2014-08-19T16:55:26.953,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 374. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:26.953,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 107. Nacking mccouch update. [views:debug,2014-08-19T16:55:26.953,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/374. Updated state: replica (0) [views:debug,2014-08-19T16:55:26.954,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/107. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:26.955,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",374,replica,0} [ns_server:debug,2014-08-19T16:55:26.955,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",107,replica,0} [ns_server:debug,2014-08-19T16:55:26.955,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,374, 736,672,608,544,970,412,742,710,678,646,614,582,550,518,976,944,418,386,1008, 748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594, 562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404,766,734, 702,670,638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974, 942,416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752, 720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534, 992,960,402,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642, 610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730, 666,602,538,964,406,704,640,576,512,938,380,1002] [ns_server:debug,2014-08-19T16:55:26.956,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956, 645,517,151,696,568,385,1007,747,619,125,981,734,670,606,423,359,968,721,657, 593,529,163,955,708,644,580,397,1019,942,759,695,631,567,137,1006,993,746, 682,618,554,371,980,733,669,605,541,111,967,720,656,592,409,345,954,707,643, 579,515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553,123,979, 732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017,940,757, 693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718, 654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,121,977,730,666,602,419,355,964,717,653,589,525,159,951, 704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367, 976,729,665,601,537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754, 690,626,562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,715, 651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987, 740,676,612,548,365,974,727,663,599,535,169,961,714,650,586,403,948,765,701, 637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662, 598,415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,129,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584,401, 1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660,349, 711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399, 1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,709,581,1020, 943,760,632,994,683,555] [views:debug,2014-08-19T16:55:27.070,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/374. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:27.070,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",374,replica,0} [views:debug,2014-08-19T16:55:27.070,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/107. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:27.070,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",107,replica,0} [ns_server:debug,2014-08-19T16:55:27.129,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 88. Nacking mccouch update. [views:debug,2014-08-19T16:55:27.129,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/88. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:27.129,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",88,replica,0} [ns_server:debug,2014-08-19T16:55:27.129,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518,152, 944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100,956,398, 1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,708,644,580, 516,150,942,384,1006,92,746,714,682,650,618,586,554,522,156,124,980,948,422, 390,358,1012,98,752,720,688,656,624,592,560,528,162,130,986,954,396,364,1018, 758,726,694,662,630,598,566,534,168,136,104,992,960,402,370,764,732,700,668, 636,604,572,540,142,110,998,966,408,376,344,738,706,674,642,610,578,546,514, 148,116,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692, 628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146,938, 380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950,392,1014, 754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702,638,574, 144,378,1000,740,676,612,548,118,974,416,352] [views:debug,2014-08-19T16:55:27.238,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/88. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:27.238,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",88,replica,0} [ns_server:debug,2014-08-19T16:55:27.389,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 372. Nacking mccouch update. [views:debug,2014-08-19T16:55:27.390,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/372. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:27.390,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",372,replica,0} [ns_server:debug,2014-08-19T16:55:27.390,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,374, 736,672,608,544,970,412,742,710,678,646,614,582,550,518,976,944,418,386,1008, 748,716,684,652,620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594, 562,530,988,956,398,1020,760,728,696,664,632,600,568,536,994,962,404,372,766, 734,702,670,638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516, 974,942,416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012, 752,720,688,656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566, 534,992,960,402,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674, 642,610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990, 730,666,602,538,964,406,704,640,576,512,938,380,1002] [ns_server:debug,2014-08-19T16:55:27.438,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 105. Nacking mccouch update. [views:debug,2014-08-19T16:55:27.438,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/105. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:27.438,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",105,replica,0} [ns_server:debug,2014-08-19T16:55:27.439,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956, 645,517,151,696,568,385,1007,747,619,125,981,734,670,606,423,359,968,721,657, 593,529,163,955,708,644,580,397,1019,942,759,695,631,567,137,1006,993,746, 682,618,554,371,980,733,669,605,541,111,967,720,656,592,409,345,954,707,643, 579,515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553,123,979, 732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017,940,757, 693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718, 654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,121,977,730,666,602,419,355,964,717,653,589,525,159,951, 704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367, 976,729,665,601,537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754, 690,626,562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,715, 651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987, 740,676,612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765, 701,637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726, 662,598,415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751, 687,623,559,129,985,738,674,610,363,972,725,661,597,533,167,959,712,648,584, 401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660, 349,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582, 399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,709,581, 
1020,943,760,632,994,683,555] [ns_server:debug,2014-08-19T16:55:27.538,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 86. Nacking mccouch update. [views:debug,2014-08-19T16:55:27.539,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/86. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:27.539,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",86,replica,0} [views:debug,2014-08-19T16:55:27.539,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/372. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:27.539,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",372,replica,0} [ns_server:debug,2014-08-19T16:55:27.539,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518,152, 944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100,956,398, 1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,86,708,644, 580,516,150,942,384,1006,92,746,714,682,650,618,586,554,522,156,124,980,948, 422,390,358,1012,98,752,720,688,656,624,592,560,528,162,130,986,954,396,364, 1018,758,726,694,662,630,598,566,534,168,136,104,992,960,402,370,764,732,700, 668,636,604,572,540,142,110,998,966,408,376,344,738,706,674,642,610,578,546, 514,148,116,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154, 122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756, 692,628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950,392, 1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702,638, 574,144,378,1000,740,676,612,548,118,974,416,352] [views:debug,2014-08-19T16:55:27.572,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/105. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:27.573,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",105,replica,0} [views:debug,2014-08-19T16:55:27.672,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/86. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:27.672,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",86,replica,0} [ns_server:debug,2014-08-19T16:55:27.954,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 370. Nacking mccouch update. [views:debug,2014-08-19T16:55:27.954,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/370. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:27.954,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",370,replica,0} [ns_server:debug,2014-08-19T16:55:27.954,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,374, 736,672,608,544,970,412,710,646,582,518,976,944,418,386,1008,748,716,684,652, 620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562,530,988,956, 398,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638, 606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416,384, 1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720,688,656, 624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534,992,960,402, 370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642,610,578, 546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,730,666,602, 538,964,406,704,640,576,512,938,380,1002,742,678,614,550] [ns_server:debug,2014-08-19T16:55:27.995,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 103. Nacking mccouch update. [views:debug,2014-08-19T16:55:27.995,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/103. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:27.995,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",103,replica,0} [ns_server:debug,2014-08-19T16:55:27.996,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,957,646,697,569,1008,748,620,982,671,543,722,594,411,956, 645,517,151,696,568,385,1007,747,619,125,981,734,670,606,423,359,968,721,657, 593,529,163,955,708,644,580,397,1019,942,759,695,631,567,137,1006,993,746, 682,618,554,371,980,733,669,605,541,111,967,720,656,592,409,345,954,707,643, 579,515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553,123,979, 732,668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017,940,757, 693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718, 654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,121,977,730,666,602,419,355,964,717,653,589,525,159,951, 704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367, 976,729,665,601,537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754, 690,626,562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,715, 651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987, 740,676,612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765, 701,637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726, 662,598,415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751, 687,623,559,129,985,738,674,610,363,972,725,661,597,533,167,103,959,712,648, 584,401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971, 660,349,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710, 582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,709,581, 
1020,943,760,632,994,683,555] [views:debug,2014-08-19T16:55:28.121,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/370. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:28.121,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",370,replica,0} [ns_server:debug,2014-08-19T16:55:28.153,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1021. Nacking mccouch update. [views:debug,2014-08-19T16:55:28.154,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1021. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:28.154,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1021,replica,0} [views:debug,2014-08-19T16:55:28.154,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/103. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:28.154,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",103,replica,0} [ns_server:debug,2014-08-19T16:55:28.154,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518,152, 1021,944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100, 956,398,1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,86, 708,644,580,516,150,942,384,1006,92,746,714,682,650,618,586,554,522,156,124, 980,948,422,390,358,1012,98,752,720,688,656,624,592,560,528,162,130,986,954, 396,364,1018,758,726,694,662,630,598,566,534,168,136,104,992,960,402,370,764, 732,700,668,636,604,572,540,142,110,998,966,408,376,344,738,706,674,642,610, 578,546,514,148,116,972,940,414,382,350,1004,90,744,712,680,648,616,584,552, 520,154,122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394, 1016,756,692,628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576, 512,146,938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158, 950,392,1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766, 702,638,574,144,378,1000,740,676,612,548,118,974,416,352] [views:debug,2014-08-19T16:55:28.258,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1021. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:28.258,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1021,replica,0} [ns_server:debug,2014-08-19T16:55:28.372,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 101. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:28.372,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 368. Nacking mccouch update. [views:debug,2014-08-19T16:55:28.372,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/101. 
Updated state: replica (0) [views:debug,2014-08-19T16:55:28.372,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/368. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:28.373,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",368,replica,0} [ns_server:debug,2014-08-19T16:55:28.373,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",101,replica,0} [ns_server:debug,2014-08-19T16:55:28.373,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,374, 736,672,608,544,970,412,710,646,582,518,976,944,418,386,1008,748,716,684,652, 620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562,530,988,956, 398,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638, 606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416,384, 1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720,688,656, 624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534,992,960,402, 370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642,610,578, 546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023,978,946, 420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,368,730,666, 602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550] [ns_server:debug,2014-08-19T16:55:28.374,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594,411, 956,645,517,151,696,568,385,1007,747,619,125,981,670,359,968,721,657,593,529, 163,955,708,644,580,397,1019,942,759,695,631,567,137,1006,993,746,682,618, 554,371,980,733,669,605,541,111,967,720,656,592,409,345,954,707,643,579,515, 149,1018,941,758,694,630,566,383,1005,992,745,681,617,553,123,979,732,668, 604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017,940,757,693,629, 565,135,1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718,654,590, 407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381,1003,990,743, 679,615,551,121,977,730,666,602,419,355,964,717,653,589,525,159,951,704,640, 576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367,976,729, 665,601,537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626, 562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,715,651,587, 523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676, 612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598, 415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623, 559,129,985,738,674,610,363,972,725,661,597,533,167,103,959,712,648,584,401, 1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660,349, 711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399, 1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,709,581,1020, 943,760,632,994,683,555,734,606,423] [views:debug,2014-08-19T16:55:28.441,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for 
maps_1_8_metahash/101. Updated state: replica (0) [views:debug,2014-08-19T16:55:28.441,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/368. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:28.441,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",101,replica,0} [ns_server:debug,2014-08-19T16:55:28.441,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",368,replica,0} [ns_server:debug,2014-08-19T16:55:28.490,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1019. Nacking mccouch update. [views:debug,2014-08-19T16:55:28.490,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1019. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:28.491,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1019,replica,0} [ns_server:debug,2014-08-19T16:55:28.491,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518,152, 1021,944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100, 956,398,1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,86, 708,644,580,516,150,1019,942,384,1006,92,746,714,682,650,618,586,554,522,156, 124,980,948,422,390,358,1012,98,752,720,688,656,624,592,560,528,162,130,986, 954,396,364,1018,758,726,694,662,630,598,566,534,168,136,104,992,960,402,370, 764,732,700,668,636,604,572,540,142,110,998,966,408,376,344,738,706,674,642, 610,578,546,514,148,116,972,940,414,382,350,1004,90,744,712,680,648,616,584, 552,520,154,122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394, 1016,756,692,628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576, 512,146,938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158, 950,392,1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766, 702,638,574,144,378,1000,740,676,612,548,118,974,416,352] [views:debug,2014-08-19T16:55:28.611,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1019. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:28.611,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1019,replica,0} [ns_server:debug,2014-08-19T16:55:28.775,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 366. Nacking mccouch update. [views:debug,2014-08-19T16:55:28.775,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/366. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:28.775,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",366,replica,0} [ns_server:debug,2014-08-19T16:55:28.776,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,374, 736,672,608,544,970,412,710,646,582,518,976,944,418,386,1008,748,716,684,652, 620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720,688, 656,624,592,560,528,986,954,396,1018,758,726,694,662,630,598,566,534,992,960, 402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642,610, 578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023,978, 946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,368,730, 666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550] [ns_server:debug,2014-08-19T16:55:28.792,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 99. Nacking mccouch update. [views:debug,2014-08-19T16:55:28.792,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/99. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:28.792,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",99,replica,0} [ns_server:debug,2014-08-19T16:55:28.793,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594,411, 956,645,517,151,696,568,385,1007,747,619,125,981,670,359,99,968,721,657,593, 529,163,955,708,644,580,397,1019,942,759,695,631,567,137,1006,993,746,682, 618,554,371,980,733,669,605,541,111,967,720,656,592,409,345,954,707,643,579, 515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553,123,979,732, 668,604,421,357,966,719,655,591,527,161,953,706,642,578,395,1017,940,757,693, 629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718,654, 590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381,1003,990, 743,679,615,551,121,977,730,666,602,419,355,964,717,653,589,525,159,951,704, 640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367,976, 729,665,601,537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754,690, 626,562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,715,651, 587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740, 676,612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701, 637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662, 598,415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,129,985,738,674,610,363,972,725,661,597,533,167,103,959,712,648,584, 401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660, 349,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582, 399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,709,581, 
1020,943,760,632,994,683,555,734,606,423] [ns_server:debug,2014-08-19T16:55:28.917,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1017. Nacking mccouch update. [views:debug,2014-08-19T16:55:28.918,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1017. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:28.918,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1017,replica,0} [views:debug,2014-08-19T16:55:28.918,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/366. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:28.918,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",366,replica,0} [ns_server:debug,2014-08-19T16:55:28.919,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518,152, 1021,944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100, 956,398,1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,86, 708,644,580,516,150,1019,942,384,1006,746,682,618,554,124,980,948,422,390, 358,1012,98,752,720,688,656,624,592,560,528,162,130,986,954,396,364,1018,758, 726,694,662,630,598,566,534,168,136,104,992,960,402,370,764,732,700,668,636, 604,572,540,142,110,998,966,408,376,344,738,706,674,642,610,578,546,514,148, 116,1017,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692, 628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146,938, 380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950,392,1014, 754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702,638,574, 144,378,1000,740,676,612,548,118,974,416,352,92,714,650,586,522,156] [views:debug,2014-08-19T16:55:28.952,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/99. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:28.952,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",99,replica,0} [views:debug,2014-08-19T16:55:29.056,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1017. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:29.056,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1017,replica,0} [ns_server:debug,2014-08-19T16:55:29.323,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 97. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:29.323,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 364. Nacking mccouch update. [views:debug,2014-08-19T16:55:29.323,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/364. 
Updated state: replica (0) [views:debug,2014-08-19T16:55:29.323,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/97. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:29.323,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",364,replica,0} [ns_server:debug,2014-08-19T16:55:29.323,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",97,replica,0} [ns_server:debug,2014-08-19T16:55:29.324,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,724,660,596,532,958,400,1022,762,698,634,570,996,374, 736,672,608,544,970,412,710,646,582,518,976,944,418,386,1008,748,716,684,652, 620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720,688, 656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,992, 960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642, 610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,368, 730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550] [ns_server:debug,2014-08-19T16:55:29.324,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594,411, 956,645,517,151,696,568,385,1007,747,619,125,981,670,359,99,968,721,657,593, 529,163,955,708,644,580,397,1019,942,759,695,631,567,137,1006,993,746,682, 618,554,371,980,733,669,605,541,111,967,720,656,592,409,345,954,707,643,579, 515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553,123,979,732, 668,604,421,357,97,966,719,655,591,527,161,953,706,642,578,395,1017,940,757, 693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718, 654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,121,977,730,666,602,419,355,964,717,653,589,525,159,951, 704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367, 976,729,665,601,537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754, 690,626,562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,715, 651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987, 740,676,612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765, 701,637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726, 662,598,415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998,751, 687,623,559,129,985,738,674,610,363,972,725,661,597,533,167,103,959,712,648, 584,401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971, 660,349,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710, 582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,709,581, 1020,943,760,632,994,683,555,734,606,423] [ns_server:debug,2014-08-19T16:55:29.406,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document 
into vb: 1015. Nacking mccouch update. [views:debug,2014-08-19T16:55:29.406,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1015. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:29.407,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1015,replica,0} [ns_server:debug,2014-08-19T16:55:29.407,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518,152, 1021,944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100, 956,398,1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,86, 708,644,580,516,150,1019,942,384,1006,746,682,618,554,124,980,948,422,390, 358,1012,98,752,720,688,656,624,592,560,528,162,130,986,954,396,364,1018,758, 726,694,662,630,598,566,534,168,136,104,992,960,402,370,764,732,700,668,636, 604,572,540,142,110,998,966,408,376,344,738,706,674,642,610,578,546,514,148, 116,1017,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692, 628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146,1015, 938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950,392, 1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702,638, 574,144,378,1000,740,676,612,548,118,974,416,352,92,714,650,586,522,156] [views:debug,2014-08-19T16:55:29.424,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/364. Updated state: replica (0) [views:debug,2014-08-19T16:55:29.424,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/97. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:29.424,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",364,replica,0} [ns_server:debug,2014-08-19T16:55:29.424,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",97,replica,0} [views:debug,2014-08-19T16:55:29.499,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1015. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:29.499,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1015,replica,0} [ns_server:debug,2014-08-19T16:55:29.626,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 362. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:29.626,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 95. Nacking mccouch update. [views:debug,2014-08-19T16:55:29.626,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/362. Updated state: replica (0) [views:debug,2014-08-19T16:55:29.626,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/95. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:29.626,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",362,replica,0} [ns_server:debug,2014-08-19T16:55:29.626,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",95,replica,0} [ns_server:debug,2014-08-19T16:55:29.627,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,976,944,418,386,1008,748,716,684, 652,620,588,556,524,982,950,424,392,1014,754,722,690,658,626,594,562,530,988, 956,398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702, 670,638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942, 416,384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674, 642,610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990, 368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550] [ns_server:debug,2014-08-19T16:55:29.628,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594,411, 956,645,517,151,696,568,385,1007,747,619,125,981,670,359,99,968,721,657,593, 529,163,955,708,644,580,397,1019,942,759,695,631,567,137,1006,993,746,682, 618,554,371,980,733,669,605,541,111,967,720,656,592,409,345,954,707,643,579, 515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553,123,979,732, 668,604,421,357,97,966,719,655,591,527,161,953,706,642,578,395,1017,940,757, 693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718, 654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,121,977,730,666,602,419,355,964,95,717,653,589,525,159, 951,704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550, 367,976,729,665,601,537,107,963,716,652,588,405,950,767,703,639,575,145,1014, 754,690,626,562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962, 715,651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000, 987,740,676,612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948, 765,701,637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973, 726,662,598,415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998, 751,687,623,559,129,985,738,674,610,363,972,725,661,597,533,167,103,959,712, 648,584,401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115, 971,660,349,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165, 710,582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,709, 581,1020,943,760,632,994,683,555,734,606,423] [ns_server:debug,2014-08-19T16:55:29.643,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1013. Nacking mccouch update. 
[views:debug,2014-08-19T16:55:29.643,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1013. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:29.643,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1013,replica,0} [ns_server:debug,2014-08-19T16:55:29.644,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518,152, 1021,944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100, 956,398,1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,86, 708,644,580,516,150,1019,942,384,1006,746,682,618,554,124,980,948,422,390, 358,1012,98,752,720,688,656,624,592,560,528,162,130,986,954,396,364,1018,758, 726,694,662,630,598,566,534,168,136,104,992,960,402,370,764,732,700,668,636, 604,572,540,142,110,998,966,408,376,344,738,706,674,642,610,578,546,514,148, 116,1017,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692, 628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146,1015, 938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950,392, 1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702,638, 574,144,1013,378,1000,740,676,612,548,118,974,416,352,92,714,650,586,522,156] [views:debug,2014-08-19T16:55:29.719,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/95. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:29.719,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",95,replica,0} [views:debug,2014-08-19T16:55:29.719,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/362. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:29.719,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",362,replica,0} [views:debug,2014-08-19T16:55:29.735,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1013. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:29.735,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1013,replica,0} [ns_server:debug,2014-08-19T16:55:29.961,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 360. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:29.961,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 93. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:29.961,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1011. Nacking mccouch update. [views:debug,2014-08-19T16:55:29.961,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/360. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:29.961,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",360,replica,0} [views:debug,2014-08-19T16:55:29.961,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/93. Updated state: replica (0) [views:debug,2014-08-19T16:55:29.961,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1011. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:29.962,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",93,replica,0} [ns_server:debug,2014-08-19T16:55:29.962,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1011,replica,0} [ns_server:debug,2014-08-19T16:55:29.962,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,944,386,1008,748,716,684,652,620, 588,556,524,982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,1012,752,720,688, 656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,992, 960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642, 610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990,368, 730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550,976,418] [ns_server:debug,2014-08-19T16:55:29.962,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518,152, 1021,944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164,100, 956,398,1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346,86, 708,644,580,516,150,1019,942,384,1006,746,682,618,554,124,980,948,422,390, 358,1012,98,752,720,688,656,624,592,560,528,162,130,986,954,396,364,1018,758, 726,694,662,630,598,566,534,168,136,104,992,960,402,370,764,732,700,668,636, 604,572,540,142,110,1011,998,966,408,376,344,738,706,674,642,610,578,546,514, 148,116,1017,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154, 122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756, 692,628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 1015,938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950, 392,1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702, 638,574,144,1013,378,1000,740,676,612,548,118,974,416,352,92,714,650,586,522, 156] [ns_server:debug,2014-08-19T16:55:29.963,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594,411, 
956,645,517,151,696,568,385,1007,747,619,125,981,670,359,99,968,721,657,593, 529,163,955,708,644,580,397,1019,942,759,695,631,567,137,1006,993,746,682, 618,554,371,980,733,669,605,541,111,967,720,656,592,409,345,954,707,643,579, 515,149,1018,941,758,694,630,566,383,1005,992,745,681,617,553,123,979,732, 668,604,421,357,97,966,719,655,591,527,161,953,706,642,578,395,1017,940,757, 693,629,565,135,1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718, 654,590,407,343,952,705,641,577,513,147,1016,939,756,692,628,564,381,1003, 990,743,679,615,551,121,977,730,666,602,419,355,964,95,717,653,589,525,159, 951,704,640,576,393,1015,938,755,691,627,563,133,1002,989,742,678,614,550, 367,976,729,665,601,537,107,963,716,652,588,405,950,767,703,639,575,145,1014, 754,690,626,562,379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962, 93,715,651,587,523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000, 987,740,676,612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948, 765,701,637,573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973, 726,662,598,415,351,960,713,649,585,521,155,947,764,700,636,572,389,1011,998, 751,687,623,559,129,985,738,674,610,363,972,725,661,597,533,167,103,959,712, 648,584,401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115, 971,660,349,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165, 710,582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,709, 581,1020,943,760,632,994,683,555,734,606,423] [views:debug,2014-08-19T16:55:30.087,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/360. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:30.087,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",360,replica,0} [views:debug,2014-08-19T16:55:30.104,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/93. Updated state: replica (0) [views:debug,2014-08-19T16:55:30.104,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1011. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:30.104,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",93,replica,0} [ns_server:debug,2014-08-19T16:55:30.104,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1011,replica,0} [ns_server:debug,2014-08-19T16:55:30.463,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 358. Nacking mccouch update. [views:debug,2014-08-19T16:55:30.463,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/358. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:30.463,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",358,replica,0} [ns_server:debug,2014-08-19T16:55:30.464,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,944,386,1008,748,716,684,652,620, 588,556,524,982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674, 642,610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,1010,718,654,590,526,952,394,1016,756,692,628,564,990, 368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550,976, 418] [ns_server:debug,2014-08-19T16:55:30.505,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 91. Nacking mccouch update. [views:debug,2014-08-19T16:55:30.505,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/91. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:30.505,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",91,replica,0} [ns_server:debug,2014-08-19T16:55:30.506,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594,411, 956,645,517,151,696,568,385,1007,747,619,125,981,670,359,99,721,593,955,708, 644,580,397,1019,942,759,695,631,567,137,1006,993,746,682,618,554,371,980, 733,669,605,541,111,967,720,656,592,409,345,954,707,643,579,515,149,1018,941, 758,694,630,566,383,1005,992,745,681,617,553,123,979,732,668,604,421,357,97, 966,719,655,591,527,161,953,706,642,578,395,1017,940,757,693,629,565,135, 1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718,654,590,407,343, 952,705,641,577,513,147,1016,939,756,692,628,564,381,1003,990,743,679,615, 551,121,977,730,666,602,419,355,964,95,717,653,589,525,159,951,704,640,576, 393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665, 601,537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626,562, 379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,93,715,651,587, 523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676, 612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598, 415,351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,129,985,738,674,610,363,972,725,661,597,533,167,103,959,712,648,584, 401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660, 349,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582, 399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,709,581, 
1020,943,760,632,994,683,555,734,606,423,968,657,529,163] [ns_server:debug,2014-08-19T16:55:30.538,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1009. Nacking mccouch update. [views:debug,2014-08-19T16:55:30.538,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1009. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:30.539,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1009,replica,0} [ns_server:debug,2014-08-19T16:55:30.539,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,1009,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518, 152,1021,944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164, 100,956,398,1020,760,696,632,568,138,994,372,734,670,606,542,112,968,410,346, 86,708,644,580,516,150,1019,942,384,1006,746,682,618,554,124,980,948,422,390, 358,1012,98,752,720,688,656,624,592,560,528,162,130,986,954,396,364,1018,758, 726,694,662,630,598,566,534,168,136,104,992,960,402,370,764,732,700,668,636, 604,572,540,142,110,1011,998,966,408,376,344,738,706,674,642,610,578,546,514, 148,116,1017,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154, 122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756, 692,628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 1015,938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950, 392,1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702, 638,574,144,1013,378,1000,740,676,612,548,118,974,416,352,92,714,650,586,522, 156] [views:debug,2014-08-19T16:55:30.589,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/358. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:30.589,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",358,replica,0} [views:debug,2014-08-19T16:55:30.622,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/91. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:30.622,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",91,replica,0} [views:debug,2014-08-19T16:55:30.639,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1009. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:30.639,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1009,replica,0} [ns_server:debug,2014-08-19T16:55:30.878,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 356. Nacking mccouch update. [views:debug,2014-08-19T16:55:30.878,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/356. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:30.879,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",356,replica,0} [ns_server:debug,2014-08-19T16:55:30.879,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,944,386,1008,748,716,684,652,620, 588,556,524,982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674, 642,610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550, 976,418] [ns_server:debug,2014-08-19T16:55:30.945,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 89. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:30.945,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1007. Nacking mccouch update. [views:debug,2014-08-19T16:55:30.945,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/89. Updated state: replica (0) [views:debug,2014-08-19T16:55:30.945,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1007. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:30.946,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1007,replica,0} [ns_server:debug,2014-08-19T16:55:30.946,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",89,replica,0} [ns_server:debug,2014-08-19T16:55:30.946,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,1009,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518, 152,1021,944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164, 100,956,398,1020,760,696,632,568,138,1007,994,372,734,670,606,542,112,968, 410,346,86,708,644,580,516,150,1019,942,384,1006,746,682,618,554,124,980,422, 358,98,752,720,688,656,624,592,560,528,162,130,986,954,396,364,1018,758,726, 694,662,630,598,566,534,168,136,104,992,960,402,370,764,732,700,668,636,604, 572,540,142,110,1011,998,966,408,376,344,738,706,674,642,610,578,546,514,148, 116,1017,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692, 628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146,1015, 938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950,392, 1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702,638, 574,144,1013,378,1000,740,676,612,548,118,974,416,352,92,714,650,586,522,156, 948,390,1012] [ns_server:debug,2014-08-19T16:55:30.947,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594,411, 956,645,517,151,696,568,385,1007,747,619,125,981,670,359,99,721,593,955,708, 644,580,397,1019,942,759,695,631,567,137,1006,993,746,682,618,554,371,980, 733,669,605,541,111,967,720,656,592,409,345,954,707,643,579,515,149,1018,941, 758,694,630,566,383,1005,992,745,681,617,553,123,979,732,668,604,421,357,97, 966,719,655,591,527,161,953,706,642,578,395,1017,940,757,693,629,565,135, 1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718,654,590,407,343, 952,705,641,577,513,147,1016,939,756,692,628,564,381,1003,990,743,679,615, 551,121,977,730,666,602,419,355,964,95,717,653,589,525,159,951,704,640,576, 393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665, 601,537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626,562, 379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,93,715,651,587, 523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676, 612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598, 415,351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,129,985,738,674,610,363,972,725,661,597,533,167,103,959,712,648,584, 401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660, 349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710, 582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,709,581, 1020,943,760,632,994,683,555,734,606,423,968,657,529,163] 
[views:debug,2014-08-19T16:55:31.038,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/356. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:31.038,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",356,replica,0} [views:debug,2014-08-19T16:55:31.071,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1007. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:31.071,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1007,replica,0} [views:debug,2014-08-19T16:55:31.071,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/89. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:31.071,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",89,replica,0} [ns_server:debug,2014-08-19T16:55:31.414,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 354. Nacking mccouch update. [views:debug,2014-08-19T16:55:31.414,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/354. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:31.414,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",354,replica,0} [ns_server:debug,2014-08-19T16:55:31.414,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,944,386,1008,748,716,684,652,620, 588,556,524,982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674, 642,610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550, 976,418,354] [ns_server:debug,2014-08-19T16:55:31.489,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1005. Nacking mccouch update. [views:debug,2014-08-19T16:55:31.489,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1005. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:31.490,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1005,replica,0} [ns_server:debug,2014-08-19T16:55:31.490,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,1009,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518, 152,1021,944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164, 100,956,398,1020,760,696,632,568,138,1007,994,372,734,670,606,542,112,968, 410,346,86,708,644,580,516,150,1019,942,384,1006,746,682,618,554,124,980,422, 358,98,752,720,688,656,624,592,560,528,162,130,986,954,396,364,1018,758,726, 694,662,630,598,566,534,168,136,104,1005,992,960,402,370,764,732,700,668,636, 604,572,540,142,110,1011,998,966,408,376,344,738,706,674,642,610,578,546,514, 148,116,1017,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154, 122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756, 692,628,564,134,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 1015,938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950, 392,1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766,702, 638,574,144,1013,378,1000,740,676,612,548,118,974,416,352,92,714,650,586,522, 156,948,390,1012] [ns_server:debug,2014-08-19T16:55:31.522,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 87. Nacking mccouch update. [views:debug,2014-08-19T16:55:31.522,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/87. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:31.523,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",87,replica,0} [ns_server:debug,2014-08-19T16:55:31.524,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594,411, 956,645,517,151,696,568,385,1007,747,619,125,981,670,359,99,721,593,955,708, 644,580,397,1019,942,759,695,631,567,137,1006,993,746,682,618,554,371,980, 733,669,605,541,111,967,720,656,592,409,345,954,707,643,579,515,149,1018,941, 758,694,630,566,383,1005,992,745,681,617,553,123,979,732,668,604,421,357,97, 966,719,655,591,527,161,953,706,642,578,395,1017,940,757,693,629,565,135, 1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718,654,590,407,343, 952,705,641,577,513,147,1016,939,756,692,628,564,381,1003,990,743,679,615, 551,121,977,730,666,602,419,355,964,95,717,653,589,525,159,951,704,640,576, 393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665, 601,537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626,562, 379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,93,715,651,587, 523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676, 612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598, 415,351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,129,985,738,674,610,363,972,725,661,597,533,167,103,959,712,648,584, 401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660, 349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710, 582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,87,709, 581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163] [views:debug,2014-08-19T16:55:31.540,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/354. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:31.540,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",354,replica,0} [views:debug,2014-08-19T16:55:31.598,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1005. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:31.598,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1005,replica,0} [views:debug,2014-08-19T16:55:31.640,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/87. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:31.640,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",87,replica,0} [ns_server:debug,2014-08-19T16:55:31.867,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 352. Nacking mccouch update. [views:debug,2014-08-19T16:55:31.867,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/352. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:31.868,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",352,replica,0} [ns_server:debug,2014-08-19T16:55:31.868,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,944,386,1008,748,716,684,652,620, 588,556,524,982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956, 398,366,1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670, 638,606,574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416, 384,352,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752, 720,688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566, 534,992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706, 674,642,610,578,546,514,972,940,414,382,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550, 976,418,354] [views:debug,2014-08-19T16:55:32.009,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/352. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:32.009,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",352,replica,0} [ns_server:debug,2014-08-19T16:55:32.051,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 546. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:32.051,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1003. Nacking mccouch update. [views:debug,2014-08-19T16:55:32.051,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/546. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:32.051,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",546,active,0} [views:debug,2014-08-19T16:55:32.051,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1003. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:32.051,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1003,replica,0} [ns_server:debug,2014-08-19T16:55:32.052,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,1009,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518, 152,1021,944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164, 100,956,398,1020,760,696,632,568,138,1007,994,372,734,670,606,542,112,968, 410,346,86,708,644,580,516,150,1019,942,384,1006,746,682,618,554,124,980,422, 358,98,752,720,688,656,624,592,560,528,162,130,986,954,396,364,1018,758,726, 694,662,630,598,566,534,168,136,104,1005,992,960,402,370,764,732,700,668,636, 604,572,540,142,110,1011,998,966,408,376,344,738,706,674,642,610,578,546,514, 148,116,1017,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154, 122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756, 692,628,564,134,1003,990,368,730,666,602,538,108,964,406,342,704,640,576,512, 146,1015,938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158, 950,392,1014,754,690,626,562,132,988,366,728,664,600,536,170,106,962,404,766, 702,638,574,144,1013,378,1000,740,676,612,548,118,974,416,352,92,714,650,586, 522,156,948,390,1012] [ns_server:debug,2014-08-19T16:55:32.052,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594,411, 956,645,517,151,696,568,385,1007,747,619,125,981,670,359,99,721,593,955,708, 644,580,397,1019,942,759,695,631,567,137,1006,993,746,682,618,554,371,980, 733,669,605,541,111,967,720,656,592,409,345,954,707,643,579,515,149,1018,941, 758,694,630,566,383,1005,992,745,681,617,553,123,979,732,668,604,421,357,97, 966,719,655,591,527,161,953,706,642,578,395,1017,940,757,693,629,565,135, 1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718,654,590,407,343, 952,705,641,577,513,147,1016,939,756,692,628,564,381,1003,990,743,679,615, 551,121,977,730,666,602,419,355,964,95,717,653,589,525,159,951,704,640,576, 393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665, 601,537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626,562, 379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,93,715,651,587, 523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676, 612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598, 415,351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648, 584,401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971, 660,349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165, 710,582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,87, 709,581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163] [views:debug,2014-08-19T16:55:32.185,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1003. 
Updated state: replica (0) [views:debug,2014-08-19T16:55:32.186,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/546. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:32.186,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1003,replica,0} [ns_server:debug,2014-08-19T16:55:32.186,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",546,active,0} [ns_server:debug,2014-08-19T16:55:32.335,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 350. Nacking mccouch update. [views:debug,2014-08-19T16:55:32.335,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/350. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:32.336,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",350,replica,0} [ns_server:debug,2014-08-19T16:55:32.336,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,710,646,582,518,944,386,1008,748,684,620,556,982, 950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956,398,366,1020, 760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638,606,574, 542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416,384,352, 1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688, 656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,992, 960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642, 610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550, 976,418,354,716,652,588,524] [views:debug,2014-08-19T16:55:32.386,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/350. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:32.386,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",350,replica,0} [ns_server:debug,2014-08-19T16:55:32.479,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1001. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:32.479,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 544. Nacking mccouch update. [views:debug,2014-08-19T16:55:32.479,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1001. Updated state: replica (0) [views:debug,2014-08-19T16:55:32.479,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/544. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:32.479,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1001,replica,0} [ns_server:debug,2014-08-19T16:55:32.479,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",544,active,0} [ns_server:debug,2014-08-19T16:55:32.480,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,1009,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518, 152,1021,944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164, 100,956,398,1020,760,696,632,568,138,1007,994,372,734,670,606,542,112,968, 410,346,86,708,644,580,516,150,1019,942,384,1006,746,682,618,554,124,980,422, 358,98,752,720,688,656,624,592,560,528,162,130,986,954,396,364,1018,758,726, 694,662,630,598,566,534,168,136,104,1005,992,960,402,370,764,732,700,668,636, 604,572,540,142,110,1011,998,966,408,376,344,738,706,674,642,610,578,546,514, 148,116,1017,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154, 122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756, 692,628,564,134,1003,990,368,730,666,602,538,108,964,406,342,704,640,576,512, 146,1015,938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158, 950,392,1014,754,690,626,562,132,1001,988,366,728,664,600,536,170,106,962, 404,766,702,638,574,144,1013,378,1000,740,676,612,548,118,974,416,352,92,714, 650,586,522,156,948,390,1012] [ns_server:debug,2014-08-19T16:55:32.481,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594, 411,956,645,517,151,696,568,385,1007,747,619,125,981,670,359,99,721,593,955, 708,644,580,397,1019,942,759,695,631,567,137,1006,993,746,682,618,554,371, 980,733,669,605,541,111,967,720,656,592,409,345,954,707,643,579,515,149,1018, 941,758,694,630,566,383,1005,992,745,681,617,553,123,979,732,668,604,421,357, 97,966,719,655,591,527,161,953,706,642,578,395,1017,940,757,693,629,565,135, 1004,991,744,680,616,552,369,978,731,667,603,539,109,965,718,654,590,407,343, 952,705,641,577,513,147,1016,939,756,692,628,564,381,1003,990,743,679,615, 551,121,977,730,666,602,419,355,964,95,717,653,589,525,159,951,704,640,576, 393,1015,938,755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665, 601,537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626,562, 379,1001,988,741,677,613,549,119,975,728,664,600,417,353,962,93,715,651,587, 523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676, 612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598, 415,351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648, 584,401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971, 660,349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165, 710,582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,87, 709,581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163] 
[views:debug,2014-08-19T16:55:32.562,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/544. Updated state: active (0) [views:debug,2014-08-19T16:55:32.562,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/1001. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:32.562,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",544,active,0} [ns_server:debug,2014-08-19T16:55:32.563,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",1001,replica,0} [ns_server:debug,2014-08-19T16:55:32.812,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 348. Nacking mccouch update. [views:debug,2014-08-19T16:55:32.813,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/348. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:32.813,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",348,replica,0} [ns_server:debug,2014-08-19T16:55:32.813,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620,556, 982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956,398,366, 1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638,606, 574,542,968,410,378,1000,740,708,676,644,612,580,548,516,974,942,416,384,352, 1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688, 656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,992, 960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674,642, 610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550, 976,418,354,716,652,588,524] [views:debug,2014-08-19T16:55:32.880,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/348. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:32.880,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",348,replica,0} [ns_server:debug,2014-08-19T16:55:32.973,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 542. Nacking mccouch update. [views:debug,2014-08-19T16:55:32.973,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/542. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:32.973,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",542,active,0} [ns_server:debug,2014-08-19T16:55:32.974,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 999. Nacking mccouch update. 
[views:debug,2014-08-19T16:55:32.974,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/999. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:32.974,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",999,replica,0} [ns_server:debug,2014-08-19T16:55:32.975,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594, 411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721,593, 955,644,942,759,695,631,567,137,1006,993,746,682,618,554,371,980,733,669,605, 541,111,967,720,656,592,409,345,954,707,643,579,515,149,1018,941,758,694,630, 566,383,1005,992,745,681,617,553,123,979,732,668,604,421,357,97,966,719,655, 591,527,161,953,706,642,578,395,1017,940,757,693,629,565,135,1004,991,744, 680,616,552,369,978,731,667,603,539,109,965,718,654,590,407,343,952,705,641, 577,513,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,121,977, 730,666,602,419,355,964,95,717,653,589,525,159,951,704,640,576,393,1015,938, 755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665,601,537,107,963, 716,652,588,405,950,767,703,639,575,145,1014,754,690,626,562,379,1001,988, 741,677,613,549,119,975,728,664,600,417,353,962,93,715,651,587,523,157,949, 766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676,612,548,365, 974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637,573,143,1012, 999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598,415,351,960, 91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,129, 985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648,584,401,1023, 946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660,349,89,711, 583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399, 1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,87,709,581,1020, 943,760,632,994,683,555,734,606,423,968,657,529,163,708,580,397,1019] [ns_server:debug,2014-08-19T16:55:32.975,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762,698, 634,570,140,1009,996,374,736,672,608,544,114,970,412,348,88,710,646,582,518, 152,1021,944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530,164, 100,956,398,1020,760,696,632,568,138,1007,994,372,734,670,606,542,112,968, 410,346,86,708,644,580,516,150,1019,942,384,1006,746,682,618,554,124,980,422, 358,999,98,752,720,688,656,624,592,560,528,162,130,986,954,396,364,1018,758, 726,694,662,630,598,566,534,168,136,104,1005,992,960,402,370,764,732,700,668, 636,604,572,540,142,110,1011,998,966,408,376,344,738,706,674,642,610,578,546, 514,148,116,1017,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520, 154,122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016, 756,692,628,564,134,1003,990,368,730,666,602,538,108,964,406,342,704,640,576, 512,146,1015,938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524, 158,950,392,1014,754,690,626,562,132,1001,988,366,728,664,600,536,170,106, 962,404,766,702,638,574,144,1013,378,1000,740,676,612,548,118,974,416,352,92, 714,650,586,522,156,948,390,1012] 
[views:debug,2014-08-19T16:55:33.041,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/542. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:33.041,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",542,active,0} [views:debug,2014-08-19T16:55:33.058,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/999. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:33.058,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",999,replica,0} [ns_server:debug,2014-08-19T16:55:33.108,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 346. Nacking mccouch update. [views:debug,2014-08-19T16:55:33.108,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/346. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:33.108,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",346,replica,0} [ns_server:debug,2014-08-19T16:55:33.109,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620,556, 982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956,398,366, 1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638,606, 574,542,968,410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384, 352,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,738,706,674, 642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614,550, 976,418,354,716,652,588,524] [views:debug,2014-08-19T16:55:33.242,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/346. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:33.243,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",346,replica,0} [ns_server:debug,2014-08-19T16:55:33.376,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 540. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:33.376,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 997. Nacking mccouch update. [views:debug,2014-08-19T16:55:33.376,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/540. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:33.376,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",540,active,0} [views:debug,2014-08-19T16:55:33.376,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/997. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:33.376,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",997,replica,0} [ns_server:debug,2014-08-19T16:55:33.377,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762, 698,634,570,140,1009,996,374,736,672,608,544,114,970,412,348,88,710,646,582, 518,152,1021,944,386,1008,748,684,620,556,126,982,424,360,722,658,594,530, 164,100,956,398,1020,760,696,632,568,138,1007,994,372,734,670,606,542,112, 968,410,346,86,708,644,580,516,150,1019,942,384,1006,746,682,618,554,124,980, 422,358,98,720,656,592,528,162,986,954,396,364,1018,758,726,694,662,630,598, 566,534,168,136,104,1005,992,960,402,370,764,732,700,668,636,604,572,540,142, 110,1011,998,966,408,376,344,738,706,674,642,610,578,546,514,148,116,1017, 972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154,122,1023,978, 946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692,628,564,134, 1003,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146,1015,938, 380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950,392,1014, 754,690,626,562,132,1001,988,366,728,664,600,536,170,106,962,404,766,702,638, 574,144,1013,378,1000,740,676,612,548,118,974,416,352,92,714,650,586,522,156, 948,390,1012,999,752,688,624,560,130] [ns_server:debug,2014-08-19T16:55:33.378,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594, 411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721,593, 955,644,942,759,695,631,567,137,1006,993,746,682,618,554,371,980,733,669,605, 541,111,967,720,656,592,409,345,954,707,643,579,515,149,1018,941,758,694,630, 566,383,1005,992,745,681,617,553,123,979,732,668,604,540,421,357,97,966,719, 655,591,527,161,953,706,642,578,395,1017,940,757,693,629,565,135,1004,991, 744,680,616,552,369,978,731,667,603,539,109,965,718,654,590,407,343,952,705, 641,577,513,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,121, 977,730,666,602,419,355,964,95,717,653,589,525,159,951,704,640,576,393,1015, 938,755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665,601,537,107, 963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626,562,379,1001, 988,741,677,613,549,119,975,728,664,600,417,353,962,93,715,651,587,523,157, 949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676,612,548, 365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637,573,143, 1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598,415,351, 960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559, 129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648,584,401, 1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660,349,89, 
711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399, 1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,87,709,581,1020, 943,760,632,994,683,555,734,606,423,968,657,529,163,708,580,397,1019] [ns_server:debug,2014-08-19T16:55:33.535,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 344. Nacking mccouch update. [views:debug,2014-08-19T16:55:33.535,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/344. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:33.535,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",344,replica,0} [ns_server:debug,2014-08-19T16:55:33.536,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620,556, 982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956,398,366, 1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638,606, 574,542,968,410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384, 352,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706, 674,642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552, 520,1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,704,640,576,512,938,380,1002,742,678,614, 550,976,418,354,716,652,588,524] [views:debug,2014-08-19T16:55:33.569,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/997. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:33.569,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",997,replica,0} [views:debug,2014-08-19T16:55:33.569,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/540. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:33.569,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",540,active,0} [views:debug,2014-08-19T16:55:33.694,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/344. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:33.694,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",344,replica,0} [ns_server:debug,2014-08-19T16:55:33.861,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 995. Nacking mccouch update. [views:debug,2014-08-19T16:55:33.861,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/995. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:33.861,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",995,replica,0} [ns_server:debug,2014-08-19T16:55:33.862,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762, 698,634,570,140,1009,996,374,736,672,608,544,114,970,412,348,88,710,646,582, 518,152,1021,944,386,1008,995,748,684,620,556,126,982,424,360,722,658,594, 530,164,100,956,398,1020,760,696,632,568,138,1007,994,372,734,670,606,542, 112,968,410,346,86,708,644,580,516,150,1019,942,384,1006,746,682,618,554,124, 980,422,358,98,720,656,592,528,162,986,954,396,364,1018,758,726,694,662,630, 598,566,534,168,136,104,1005,992,960,402,370,764,732,700,668,636,604,572,540, 142,110,1011,998,966,408,376,344,738,706,674,642,610,578,546,514,148,116, 1017,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692, 628,564,134,1003,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 1015,938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950, 392,1014,754,690,626,562,132,1001,988,366,728,664,600,536,170,106,962,404, 766,702,638,574,144,1013,378,1000,740,676,612,548,118,974,416,352,92,714,650, 586,522,156,948,390,1012,999,752,688,624,560,130] [ns_server:debug,2014-08-19T16:55:33.949,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 538. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:33.949,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 342. Nacking mccouch update. [views:debug,2014-08-19T16:55:33.950,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/538. Updated state: active (0) [views:debug,2014-08-19T16:55:33.950,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/342. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:33.950,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",538,active,0} [ns_server:debug,2014-08-19T16:55:33.950,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",342,replica,0} [ns_server:debug,2014-08-19T16:55:33.951,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620,556, 982,950,424,392,360,1014,754,722,690,658,626,594,562,530,988,956,398,366, 1020,760,728,696,664,632,600,568,536,994,962,404,372,766,734,702,670,638,606, 574,542,968,410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384, 352,1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720, 688,656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706, 674,642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552, 520,1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678, 614,550,976,418,354,716,652,588,524] [ns_server:debug,2014-08-19T16:55:33.951,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594, 411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721,593, 955,644,942,759,695,631,567,137,1006,993,746,682,618,554,371,980,733,669,605, 541,111,967,720,656,592,409,345,954,707,643,579,515,149,1018,941,758,694,630, 566,383,1005,992,745,681,617,553,123,979,732,668,604,540,421,357,97,966,719, 655,591,527,161,953,706,642,578,395,1017,940,757,693,629,565,135,1004,991, 744,680,616,552,369,978,731,667,603,539,109,965,718,654,590,407,343,952,705, 641,577,513,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,121, 977,730,666,602,538,419,355,964,95,717,653,589,525,159,951,704,640,576,393, 1015,938,755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665,601, 537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626,562,379, 1001,988,741,677,613,549,119,975,728,664,600,417,353,962,93,715,651,587,523, 157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676,612, 548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637,573, 143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598,415, 351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623, 559,129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648,584, 401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660, 349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710, 582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,87,709, 581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163,708,580,397, 1019] [views:debug,2014-08-19T16:55:33.967,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/995. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:33.967,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",995,replica,0} [views:debug,2014-08-19T16:55:34.167,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/538. Updated state: active (0) [views:debug,2014-08-19T16:55:34.168,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/342. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:34.168,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",538,active,0} [ns_server:debug,2014-08-19T16:55:34.168,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",342,replica,0} [ns_server:debug,2014-08-19T16:55:34.276,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 993. Nacking mccouch update. [views:debug,2014-08-19T16:55:34.276,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/993. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:34.277,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",993,replica,0} [ns_server:debug,2014-08-19T16:55:34.278,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762, 698,634,570,140,1009,996,374,736,672,608,544,114,970,412,348,88,710,646,582, 518,152,1021,944,386,1008,995,748,684,620,556,126,982,424,360,722,658,594, 530,164,100,956,398,1020,760,696,632,568,138,1007,994,372,734,670,606,542, 112,968,410,346,86,708,644,580,516,150,1019,942,384,1006,993,746,682,618,554, 124,980,422,358,98,720,656,592,528,162,986,954,396,364,1018,758,726,694,662, 630,598,566,534,168,136,104,1005,992,960,402,370,764,732,700,668,636,604,572, 540,142,110,1011,998,966,408,376,344,738,706,674,642,610,578,546,514,148,116, 1017,972,940,414,382,350,1004,90,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692, 628,564,134,1003,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 1015,938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950, 392,1014,754,690,626,562,132,1001,988,366,728,664,600,536,170,106,962,404, 766,702,638,574,144,1013,378,1000,740,676,612,548,118,974,416,352,92,714,650, 586,522,156,948,390,1012,999,752,688,624,560,130] [views:debug,2014-08-19T16:55:34.335,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/993. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:34.336,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",993,replica,0} [ns_server:debug,2014-08-19T16:55:34.402,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 536. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:34.402,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 170. 
Nacking mccouch update. [views:debug,2014-08-19T16:55:34.402,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/536. Updated state: active (0) [views:debug,2014-08-19T16:55:34.402,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/170. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:34.403,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",536,active,0} [ns_server:debug,2014-08-19T16:55:34.403,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",170,replica,0} [ns_server:debug,2014-08-19T16:55:34.403,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620,556, 982,424,360,754,722,690,658,626,594,562,530,988,956,398,366,1020,760,728,696, 664,632,600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968, 410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006, 746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688,656,624, 592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,992,960,402, 370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642,610, 578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564,990, 368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678,614,550, 976,418,354,716,652,588,524,950,392,1014] [ns_server:debug,2014-08-19T16:55:34.404,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594, 411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721,593, 955,644,942,759,695,631,567,137,1006,993,746,682,618,554,371,980,733,669,605, 541,111,967,720,656,592,409,345,954,707,643,579,515,149,1018,941,758,694,630, 566,383,1005,992,745,681,617,553,123,979,732,668,604,540,421,357,97,966,719, 655,591,527,161,953,706,642,578,395,1017,940,757,693,629,565,135,1004,991, 744,680,616,552,369,978,731,667,603,539,109,965,718,654,590,407,343,952,705, 641,577,513,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,121, 977,730,666,602,538,419,355,964,95,717,653,589,525,159,951,704,640,576,393, 1015,938,755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665,601, 537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626,562,379, 1001,988,741,677,613,549,119,975,728,664,600,536,417,353,962,93,715,651,587, 523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676, 612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598, 415,351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648, 584,401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971, 660,349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165, 
710,582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,87, 709,581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163,708,580,397, 1019] [views:debug,2014-08-19T16:55:34.470,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/536. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:34.470,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",536,active,0} [views:debug,2014-08-19T16:55:34.470,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/170. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:34.470,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",170,replica,0} [ns_server:debug,2014-08-19T16:55:34.570,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 991. Nacking mccouch update. [views:debug,2014-08-19T16:55:34.570,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/991. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:34.570,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",991,replica,0} [ns_server:debug,2014-08-19T16:55:34.571,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762, 698,634,570,140,1009,996,374,736,672,608,544,114,970,412,348,88,710,646,582, 518,152,1021,944,386,1008,995,748,684,620,556,126,982,424,360,722,658,594, 530,164,100,956,398,1020,760,696,632,568,138,1007,994,372,734,670,606,542, 112,968,410,346,86,708,644,580,516,150,1019,942,384,1006,993,746,682,618,554, 124,980,422,358,98,720,656,592,528,162,986,954,396,364,1018,758,726,694,662, 630,598,566,534,168,136,104,1005,992,960,402,370,764,732,700,668,636,604,572, 540,142,110,1011,998,966,408,376,344,738,706,674,642,610,578,546,514,148,116, 1017,972,940,414,382,350,1004,991,90,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692, 628,564,134,1003,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 1015,938,380,1002,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950, 392,1014,754,690,626,562,132,1001,988,366,728,664,600,536,170,106,962,404, 766,702,638,574,144,1013,378,1000,740,676,612,548,118,974,416,352,92,714,650, 586,522,156,948,390,1012,999,752,688,624,560,130] [views:debug,2014-08-19T16:55:34.654,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/991. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:34.654,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",991,replica,0} [ns_server:debug,2014-08-19T16:55:34.783,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 168. Nacking mccouch update. [views:debug,2014-08-19T16:55:34.784,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/168. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:34.784,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",168,replica,0} [ns_server:debug,2014-08-19T16:55:34.784,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,958,400,1022,762,698,634,570,996, 374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620,556, 982,424,360,754,722,690,658,626,594,562,530,988,956,398,366,1020,760,728,696, 664,632,600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968, 410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006, 746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688,656,624, 592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960, 402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642, 610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678,614, 550,976,418,354,716,652,588,524,950,392,1014] [ns_server:debug,2014-08-19T16:55:34.829,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 534. Nacking mccouch update. [views:debug,2014-08-19T16:55:34.830,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/534. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:34.830,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",534,active,0} [ns_server:debug,2014-08-19T16:55:34.831,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594, 411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721,593, 955,644,942,759,695,631,567,137,1006,993,746,682,618,554,371,980,733,669,605, 541,111,967,720,656,592,409,345,954,707,643,579,515,149,1018,941,758,694,630, 566,383,1005,992,745,681,617,553,123,979,732,668,604,540,421,357,97,966,719, 655,591,527,161,953,706,642,578,395,1017,940,757,693,629,565,135,1004,991, 744,680,616,552,369,978,731,667,603,539,109,965,718,654,590,407,343,952,705, 641,577,513,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,121, 977,730,666,602,538,419,355,964,95,717,653,589,525,159,951,704,640,576,393, 1015,938,755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665,601, 537,107,963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626,562,379, 1001,988,741,677,613,549,119,975,728,664,600,536,417,353,962,93,715,651,587, 523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676, 612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598, 534,415,351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751, 687,623,559,129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712, 648,584,401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115, 971,660,349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531, 
165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347, 87,709,581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163,708,580, 397,1019] [ns_server:debug,2014-08-19T16:55:34.938,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 989. Nacking mccouch update. [views:debug,2014-08-19T16:55:34.938,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/989. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:34.939,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",989,replica,0} [views:debug,2014-08-19T16:55:34.939,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/168. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:34.939,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",168,replica,0} [ns_server:debug,2014-08-19T16:55:34.939,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762, 698,634,570,140,1009,996,374,736,672,608,544,114,970,412,348,88,710,646,582, 518,152,1021,944,386,1008,995,748,684,620,556,126,982,424,360,722,658,594, 530,164,100,956,398,1020,760,696,632,568,138,1007,994,372,734,670,606,542, 112,968,410,346,86,708,644,580,516,150,1019,942,384,1006,993,746,682,618,554, 124,980,422,358,98,720,656,592,528,162,986,954,396,364,1018,758,726,694,662, 630,598,566,534,168,136,104,1005,992,960,402,370,764,732,700,668,636,604,572, 540,142,110,1011,998,966,408,376,344,738,706,674,642,610,578,546,514,148,116, 1017,972,940,414,382,350,1004,991,90,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692, 628,564,134,1003,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 1015,938,380,1002,989,742,678,614,550,120,976,418,354,94,716,652,588,524,158, 950,392,1014,754,690,626,562,132,1001,988,366,728,664,600,536,170,106,962, 404,766,702,638,574,144,1013,378,1000,740,676,612,548,118,974,416,352,92,714, 650,586,522,156,948,390,1012,999,752,688,624,560,130] [views:debug,2014-08-19T16:55:34.967,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/534. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:34.968,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",534,active,0} [views:debug,2014-08-19T16:55:35.056,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/989. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:35.056,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",989,replica,0} [ns_server:debug,2014-08-19T16:55:35.261,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 166. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:35.261,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 532. Nacking mccouch update. 
[views:debug,2014-08-19T16:55:35.261,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/166. Updated state: replica (0) [views:debug,2014-08-19T16:55:35.261,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/532. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:35.261,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",166,replica,0} [ns_server:debug,2014-08-19T16:55:35.262,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",532,active,0} [ns_server:debug,2014-08-19T16:55:35.262,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,754,722,690,658,626,594,562,530,988,956,398,366,1020,760,728, 696,664,632,600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542, 968,410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352, 1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688, 656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,168, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706, 674,642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552, 520,1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678, 614,550,976,418,354,716,652,588,524,950,392,1014] [ns_server:debug,2014-08-19T16:55:35.263,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594, 411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721,593, 955,644,695,567,1006,993,746,682,618,554,371,980,733,669,605,541,111,967,720, 656,592,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005, 992,745,681,617,553,123,979,732,668,604,540,421,357,97,966,719,655,591,527, 161,953,706,642,578,395,1017,940,757,693,629,565,135,1004,991,744,680,616, 552,369,978,731,667,603,539,109,965,718,654,590,407,343,952,705,641,577,513, 147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,121,977,730,666, 602,538,419,355,964,95,717,653,589,525,159,951,704,640,576,393,1015,938,755, 691,627,563,133,1002,989,742,678,614,550,367,976,729,665,601,537,107,963,716, 652,588,405,950,767,703,639,575,145,1014,754,690,626,562,379,1001,988,741, 677,613,549,119,975,728,664,600,536,417,353,962,93,715,651,587,523,157,949, 766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676,612,548,365, 974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637,573,143,1012, 999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598,534,415,351, 960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559, 129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648,584,401, 1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660,532, 349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710, 
582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,347,87,709, 581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163,708,580,397, 1019,942,759,631,137] [ns_server:debug,2014-08-19T16:55:35.353,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 987. Nacking mccouch update. [views:debug,2014-08-19T16:55:35.353,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/987. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:35.354,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",987,replica,0} [views:debug,2014-08-19T16:55:35.354,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/532. Updated state: active (0) [views:debug,2014-08-19T16:55:35.354,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/166. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:35.354,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",532,active,0} [ns_server:debug,2014-08-19T16:55:35.354,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",166,replica,0} [ns_server:debug,2014-08-19T16:55:35.354,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762, 698,634,570,140,1009,996,374,736,672,608,544,114,970,412,348,88,710,646,582, 518,152,1021,944,386,1008,995,748,684,620,556,126,982,424,360,722,658,594, 530,164,100,956,398,1020,760,696,632,568,138,1007,994,372,734,670,606,542, 112,968,410,346,86,708,644,580,516,150,1019,942,384,1006,993,746,682,618,554, 124,980,422,358,98,720,656,592,528,162,954,396,1018,758,726,694,662,630,598, 566,534,168,136,104,1005,992,960,402,370,764,732,700,668,636,604,572,540,142, 110,1011,998,966,408,376,344,738,706,674,642,610,578,546,514,148,116,1017, 972,940,414,382,350,1004,991,90,744,712,680,648,616,584,552,520,154,122,1023, 978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692,628,564, 134,1003,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146,1015, 938,380,1002,989,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950, 392,1014,754,690,626,562,132,1001,988,366,728,664,600,536,170,106,962,404, 766,702,638,574,144,1013,378,1000,987,740,676,612,548,118,974,416,352,92,714, 650,586,522,156,948,390,1012,999,752,688,624,560,130,986,364] [views:debug,2014-08-19T16:55:35.505,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/987. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:35.505,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",987,replica,0} [ns_server:debug,2014-08-19T16:55:35.688,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 164. Nacking mccouch update. [views:debug,2014-08-19T16:55:35.689,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/164. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:35.688,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 530. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:35.689,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",164,replica,0} [views:debug,2014-08-19T16:55:35.689,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/530. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:35.689,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",530,active,0} [ns_server:debug,2014-08-19T16:55:35.689,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,754,722,690,658,626,594,562,530,164,988,956,398,366,1020,760, 728,696,664,632,600,568,536,170,994,962,404,372,766,734,702,670,638,606,574, 542,968,410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352, 1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688, 656,624,592,560,528,986,954,396,364,1018,758,726,694,662,630,598,566,534,168, 992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706, 674,642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552, 520,1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678, 614,550,976,418,354,716,652,588,524,950,392,1014] [ns_server:debug,2014-08-19T16:55:35.690,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594, 411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721,593, 955,644,695,567,1006,993,746,682,618,554,371,980,733,669,605,541,111,967,720, 656,592,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005, 992,745,681,617,553,123,979,732,668,604,540,421,357,97,966,719,655,591,527, 161,953,706,642,578,395,1017,940,757,693,629,565,135,1004,991,744,680,616, 552,369,978,731,667,603,539,109,965,718,654,590,407,343,952,705,641,577,513, 147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,121,977,730,666, 602,538,419,355,964,95,717,653,589,525,159,951,704,640,576,393,1015,938,755, 691,627,563,133,1002,989,742,678,614,550,367,976,729,665,601,537,107,963,716, 652,588,405,950,767,703,639,575,145,1014,754,690,626,562,379,1001,988,741, 677,613,549,119,975,728,664,600,536,417,353,962,93,715,651,587,523,157,949, 766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676,612,548,365, 974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637,573,143,1012, 999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598,534,415,351, 960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559, 129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648,584,401, 1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660,532, 349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710, 
582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,530,347,87, 709,581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163,708,580,397, 1019,942,759,631,137] [ns_server:debug,2014-08-19T16:55:35.797,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 985. Nacking mccouch update. [views:debug,2014-08-19T16:55:35.797,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/985. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:35.797,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",985,replica,0} [views:debug,2014-08-19T16:55:35.797,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/530. Updated state: active (0) [views:debug,2014-08-19T16:55:35.798,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/164. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:35.798,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",530,active,0} [ns_server:debug,2014-08-19T16:55:35.798,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",164,replica,0} [ns_server:debug,2014-08-19T16:55:35.798,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762, 698,634,570,140,1009,996,374,736,672,608,544,114,970,412,348,88,710,646,582, 518,152,1021,944,386,1008,995,748,684,620,556,126,982,424,360,722,658,594, 530,164,100,956,398,1020,760,696,632,568,138,1007,994,372,734,670,606,542, 112,968,410,346,86,708,644,580,516,150,1019,942,384,1006,993,746,682,618,554, 124,980,422,358,98,720,656,592,528,162,954,396,1018,758,726,694,662,630,598, 566,534,168,136,104,1005,992,960,402,370,764,732,700,668,636,604,572,540,142, 110,1011,998,966,408,376,344,985,738,706,674,642,610,578,546,514,148,116, 1017,972,940,414,382,350,1004,991,90,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692, 628,564,134,1003,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 1015,938,380,1002,989,742,678,614,550,120,976,418,354,94,716,652,588,524,158, 950,392,1014,754,690,626,562,132,1001,988,366,728,664,600,536,170,106,962, 404,766,702,638,574,144,1013,378,1000,987,740,676,612,548,118,974,416,352,92, 714,650,586,522,156,948,390,1012,999,752,688,624,560,130,986,364] [views:debug,2014-08-19T16:55:35.962,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/985. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:35.962,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",985,replica,0} [ns_server:debug,2014-08-19T16:55:36.133,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 528. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:36.133,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 162. 
Nacking mccouch update. [views:debug,2014-08-19T16:55:36.133,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/528. Updated state: active (0) [views:debug,2014-08-19T16:55:36.133,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/162. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:36.133,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",528,active,0} [ns_server:debug,2014-08-19T16:55:36.133,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",162,replica,0} [ns_server:debug,2014-08-19T16:55:36.134,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,754,722,690,658,626,594,562,530,164,988,956,398,366,1020,760, 728,696,664,632,600,568,536,170,994,962,404,372,766,734,702,670,638,606,574, 542,968,410,378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352, 1006,746,714,682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688, 656,624,592,560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534, 168,992,960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738, 706,674,642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584, 552,520,1023,978,946,420,388,356,1010,718,654,590,526,952,394,1016,756,692, 628,564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742, 678,614,550,976,418,354,716,652,588,524,950,392,1014] [ns_server:debug,2014-08-19T16:55:36.134,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594, 411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721,593, 955,644,695,567,1006,993,746,682,618,554,371,980,733,669,605,541,111,967,720, 656,592,528,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,123,979,732,668,604,540,421,357,97,966,719,655,591, 527,161,953,706,642,578,395,1017,940,757,693,629,565,135,1004,991,744,680, 616,552,369,978,731,667,603,539,109,965,718,654,590,407,343,952,705,641,577, 513,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,121,977,730, 666,602,538,419,355,964,95,717,653,589,525,159,951,704,640,576,393,1015,938, 755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665,601,537,107,963, 716,652,588,405,950,767,703,639,575,145,1014,754,690,626,562,379,1001,988, 741,677,613,549,119,975,728,664,600,536,417,353,962,93,715,651,587,523,157, 949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676,612,548, 365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637,573,143, 1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598,534,415, 351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623, 559,129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648,584, 401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660, 532,349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165, 
710,582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,530,347, 87,709,581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163,708,580, 397,1019,942,759,631,137] [ns_server:debug,2014-08-19T16:55:36.225,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 983. Nacking mccouch update. [views:debug,2014-08-19T16:55:36.225,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/983. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:36.225,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",983,replica,0} [ns_server:debug,2014-08-19T16:55:36.226,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762, 698,634,570,140,1009,996,374,983,736,672,608,544,114,970,412,348,88,710,646, 582,518,152,1021,944,386,1008,995,748,684,620,556,126,982,424,360,722,658, 594,530,164,100,956,398,1020,760,696,632,568,138,1007,994,372,734,670,606, 542,112,968,410,346,86,708,644,580,516,150,1019,942,384,1006,993,746,682,618, 554,124,980,422,358,98,720,656,592,528,162,954,396,1018,758,726,694,662,630, 598,566,534,168,136,104,1005,992,960,402,370,764,732,700,668,636,604,572,540, 142,110,1011,998,966,408,376,344,985,738,706,674,642,610,578,546,514,148,116, 1017,972,940,414,382,350,1004,991,90,744,712,680,648,616,584,552,520,154,122, 1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692, 628,564,134,1003,990,368,730,666,602,538,108,964,406,342,704,640,576,512,146, 1015,938,380,1002,989,742,678,614,550,120,976,418,354,94,716,652,588,524,158, 950,392,1014,754,690,626,562,132,1001,988,366,728,664,600,536,170,106,962, 404,766,702,638,574,144,1013,378,1000,987,740,676,612,548,118,974,416,352,92, 714,650,586,522,156,948,390,1012,999,752,688,624,560,130,986,364] [views:debug,2014-08-19T16:55:36.242,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/528. Updated state: active (0) [views:debug,2014-08-19T16:55:36.242,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/162. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:36.242,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",528,active,0} [ns_server:debug,2014-08-19T16:55:36.242,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",162,replica,0} [views:debug,2014-08-19T16:55:36.318,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/983. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:36.318,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",983,replica,0} [ns_server:debug,2014-08-19T16:55:36.493,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 160. Nacking mccouch update. 
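The cycle repeating through these entries is: an mc_connection worker writes a _local/vbuuid document for a vbucket and nacks the mccouch update, the resulting set_vbucket event reaches mc_couch_events, and the per-bucket capi_set_view_manager recomputes the "Usable vbuckets" list it logs. As a rough illustration only (not the actual capi_set_view_manager code; the module name, the use of ordsets, and the active/replica rule are assumptions, and the logged lists are clearly not kept sorted), a handler folding one such event into a usable-vbuckets set might look like:

    %% Hypothetical sketch; the real capi_set_view_manager logic is more involved.
    -module(vbucket_event_sketch).
    -export([apply_event/2]).

    %% Event = {set_vbucket, Bucket, VBucket, State, CheckpointId},
    %% e.g. {set_vbucket,"tiles",162,replica,0} as logged above.
    apply_event({set_vbucket, _Bucket, VBucket, State, _CkptId}, Usable)
      when State =:= active; State =:= replica ->
        ordsets:add_element(VBucket, Usable);    %% vbucket counts as usable
    apply_event({set_vbucket, _Bucket, VBucket, _OtherState, _CkptId}, Usable) ->
        ordsets:del_element(VBucket, Usable).    %% e.g. dead -> no longer usable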
[views:debug,2014-08-19T16:55:36.493,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/160. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:36.493,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",160,replica,0} [ns_server:debug,2014-08-19T16:55:36.494,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,722,658,594,530,164,988,956,398,366,1020,760,728,696,664,632, 600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410,378, 346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006,746,714, 682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688,656,624,592,560, 528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960,402, 370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642,610, 578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678,614, 550,976,418,354,716,652,588,524,950,392,1014,754,690,626,562] [ns_server:debug,2014-08-19T16:55:36.577,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 526. Nacking mccouch update. [views:debug,2014-08-19T16:55:36.577,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/526. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:36.577,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",526,active,0} [ns_server:debug,2014-08-19T16:55:36.578,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594, 411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721,593, 955,644,695,567,1006,993,746,682,618,554,371,980,733,669,605,541,111,967,720, 656,592,528,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,123,979,732,668,604,540,421,357,97,966,719,655,591, 527,161,953,706,642,578,395,1017,940,757,693,629,565,135,1004,991,744,680, 616,552,369,978,731,667,603,539,109,965,718,654,590,526,407,343,952,705,641, 577,513,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,121,977, 730,666,602,538,419,355,964,95,717,653,589,525,159,951,704,640,576,393,1015, 938,755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665,601,537,107, 963,716,652,588,405,950,767,703,639,575,145,1014,754,690,626,562,379,1001, 988,741,677,613,549,119,975,728,664,600,536,417,353,962,93,715,651,587,523, 157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676,612, 548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637,573, 143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598,534, 415,351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648, 584,401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971, 660,532,349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531, 165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,530, 347,87,709,581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163,708, 580,397,1019,942,759,631,137] [ns_server:debug,2014-08-19T16:55:36.593,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 981. Nacking mccouch update. [views:debug,2014-08-19T16:55:36.594,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/981. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:36.594,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",981,replica,0} [ns_server:debug,2014-08-19T16:55:36.594,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762, 698,634,570,140,1009,996,374,983,736,672,608,544,114,970,412,348,88,710,646, 582,518,152,1021,944,386,1008,995,748,684,620,556,126,982,424,360,722,658, 594,530,164,100,956,398,1020,760,696,632,568,138,1007,994,372,981,734,670, 606,542,112,968,410,346,86,708,644,580,516,150,1019,942,384,1006,993,746,682, 618,554,124,980,422,358,98,720,656,592,528,162,954,396,1018,758,726,694,662, 630,598,566,534,168,136,104,1005,992,960,402,370,764,732,700,668,636,604,572, 540,142,110,1011,998,966,408,376,344,985,738,706,674,642,610,578,546,514,148, 116,1017,972,940,414,382,350,1004,991,90,744,712,680,648,616,584,552,520,154, 122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756, 692,628,564,134,1003,990,368,730,666,602,538,108,964,406,342,704,640,576,512, 146,1015,938,380,1002,989,742,678,614,550,120,976,418,354,94,716,652,588,524, 158,950,392,1014,754,690,626,562,132,1001,988,366,728,664,600,536,170,106, 962,404,766,702,638,574,144,1013,378,1000,987,740,676,612,548,118,974,416, 352,92,714,650,586,522,156,948,390,1012,999,752,688,624,560,130,986,364] [views:debug,2014-08-19T16:55:36.611,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/160. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:36.611,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",160,replica,0} [views:debug,2014-08-19T16:55:36.644,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/526. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:36.644,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",526,active,0} [views:debug,2014-08-19T16:55:36.661,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/981. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:36.661,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",981,replica,0} [ns_server:debug,2014-08-19T16:55:36.934,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 158. Nacking mccouch update. [views:debug,2014-08-19T16:55:36.934,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/158. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:36.934,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",158,replica,0} [ns_server:debug,2014-08-19T16:55:36.935,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,722,658,594,530,164,988,956,398,366,1020,760,728,696,664,632, 600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410,378, 346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006,746,714, 682,650,618,586,554,522,980,948,422,390,358,1012,752,720,688,656,624,592,560, 528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960,402, 370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642,610, 578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,1023, 978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628,564, 990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678,614, 550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562] [ns_server:debug,2014-08-19T16:55:37.009,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 979. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:37.009,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 524. Nacking mccouch update. [views:debug,2014-08-19T16:55:37.009,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/979. Updated state: replica (0) [views:debug,2014-08-19T16:55:37.009,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/524. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:37.009,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",979,replica,0} [views:debug,2014-08-19T16:55:37.009,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/158. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:37.010,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",524,active,0} [ns_server:debug,2014-08-19T16:55:37.010,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",158,replica,0} [ns_server:debug,2014-08-19T16:55:37.010,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762, 698,634,570,140,1009,996,374,983,736,672,608,544,114,970,412,348,88,710,646, 582,518,152,1021,944,386,1008,995,748,684,620,556,126,982,424,360,722,658, 594,530,164,100,956,398,1020,760,696,632,568,138,1007,994,372,981,734,670, 606,542,112,968,410,346,86,708,644,580,516,150,1019,942,384,1006,993,746,682, 618,554,124,980,422,358,98,720,656,592,528,162,954,396,1018,758,726,694,662, 630,598,566,534,168,136,104,1005,992,960,402,370,979,764,732,700,668,636,604, 572,540,142,110,1011,998,966,408,376,344,985,738,706,674,642,610,578,546,514, 148,116,1017,972,940,414,382,350,1004,991,90,744,712,680,648,616,584,552,520, 154,122,1023,978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016, 756,692,628,564,134,1003,990,368,730,666,602,538,108,964,406,342,704,640,576, 512,146,1015,938,380,1002,989,742,678,614,550,120,976,418,354,94,716,652,588, 524,158,950,392,1014,754,690,626,562,132,1001,988,366,728,664,600,536,170, 106,962,404,766,702,638,574,144,1013,378,1000,987,740,676,612,548,118,974, 416,352,92,714,650,586,522,156,948,390,1012,999,752,688,624,560,130,986,364] [ns_server:debug,2014-08-19T16:55:37.010,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594, 411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721,593, 955,644,695,567,1006,993,746,682,618,554,371,980,733,669,605,541,111,967,720, 656,592,528,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383, 1005,992,745,681,617,553,123,979,732,668,604,540,421,357,97,966,719,655,591, 527,161,953,706,642,578,395,1017,940,757,693,629,565,135,1004,991,744,680, 616,552,369,978,731,667,603,539,109,965,718,654,590,526,407,343,952,705,641, 577,513,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,121,977, 730,666,602,538,419,355,964,95,717,653,589,525,159,951,704,640,576,393,1015, 938,755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665,601,537,107, 963,716,652,588,524,405,950,767,703,639,575,145,1014,754,690,626,562,379, 1001,988,741,677,613,549,119,975,728,664,600,536,417,353,962,93,715,651,587, 523,157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676, 612,548,365,974,727,663,599,535,169,105,961,714,650,586,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598, 534,415,351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751, 687,623,559,129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712, 648,584,401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115, 971,660,532,349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659, 531,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658, 530,347,87,709,581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163, 
708,580,397,1019,942,759,631,137] [views:debug,2014-08-19T16:55:37.112,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/524. Updated state: active (0) [views:debug,2014-08-19T16:55:37.112,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/979. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:37.112,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",524,active,0} [ns_server:debug,2014-08-19T16:55:37.112,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",979,replica,0} [ns_server:debug,2014-08-19T16:55:37.252,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 156. Nacking mccouch update. [views:debug,2014-08-19T16:55:37.252,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/156. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:37.252,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",156,replica,0} [ns_server:debug,2014-08-19T16:55:37.253,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,722,658,594,530,164,988,956,398,366,1020,760,728,696,664,632, 600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410,378, 346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006,746,714, 682,650,618,586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592, 560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960, 402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642, 610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678, 614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562] [views:debug,2014-08-19T16:55:37.320,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/156. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:37.320,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",156,replica,0} [ns_server:debug,2014-08-19T16:55:37.411,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 522. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:37.412,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 977. Nacking mccouch update. [views:debug,2014-08-19T16:55:37.412,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/522. Updated state: active (0) [views:debug,2014-08-19T16:55:37.412,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/977. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:37.412,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",522,active,0} [ns_server:debug,2014-08-19T16:55:37.412,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",977,replica,0} [ns_server:debug,2014-08-19T16:55:37.413,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762, 698,634,570,140,1009,996,374,983,736,672,608,544,114,970,412,348,88,710,646, 582,518,152,1021,944,386,1008,995,748,684,620,556,126,982,424,360,722,658, 594,530,164,100,956,398,1020,760,696,632,568,138,1007,994,372,981,734,670, 606,542,112,968,410,346,86,708,644,580,516,150,1019,942,384,1006,993,746,682, 618,554,124,980,422,358,98,720,656,592,528,162,954,396,1018,758,694,630,566, 136,1005,992,960,402,370,979,764,732,700,668,636,604,572,540,142,110,1011, 998,966,408,376,344,985,738,706,674,642,610,578,546,514,148,116,1017,972,940, 414,382,350,1004,991,90,744,712,680,648,616,584,552,520,154,122,1023,978,946, 420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692,628,564,134, 1003,990,368,977,730,666,602,538,108,964,406,342,704,640,576,512,146,1015, 938,380,1002,989,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950, 392,1014,754,690,626,562,132,1001,988,366,728,664,600,536,170,106,962,404, 766,702,638,574,144,1013,378,1000,987,740,676,612,548,118,974,416,352,92,714, 650,586,522,156,948,390,1012,999,752,688,624,560,130,986,364,726,662,598,534, 168,104] [ns_server:debug,2014-08-19T16:55:37.413,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594, 411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721,593, 955,644,695,567,1006,746,618,980,733,669,605,541,111,967,720,656,592,528,409, 345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745,681, 617,553,123,979,732,668,604,540,421,357,97,966,719,655,591,527,161,953,706, 642,578,395,1017,940,757,693,629,565,135,1004,991,744,680,616,552,369,978, 731,667,603,539,109,965,718,654,590,526,407,343,952,705,641,577,513,147,1016, 939,756,692,628,564,381,1003,990,743,679,615,551,121,977,730,666,602,538,419, 355,964,95,717,653,589,525,159,951,704,640,576,393,1015,938,755,691,627,563, 133,1002,989,742,678,614,550,367,976,729,665,601,537,107,963,716,652,588,524, 405,950,767,703,639,575,145,1014,754,690,626,562,379,1001,988,741,677,613, 549,119,975,728,664,600,536,417,353,962,93,715,651,587,523,157,949,766,702, 638,574,391,1013,753,689,625,561,131,1000,987,740,676,612,548,365,974,727, 663,599,535,169,105,961,714,650,586,522,403,948,765,701,637,573,143,1012,999, 752,688,624,560,377,986,739,675,611,547,117,973,726,662,598,534,415,351,960, 91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,129, 985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648,584,401,1023, 946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660,532,349,89, 711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710,582,399, 1021,944,761,633,139,995,684,556,373,735,607,113,969,658,530,347,87,709,581, 
1020,943,760,632,994,683,555,734,606,423,968,657,529,163,708,580,397,1019, 942,759,631,137,993,682,554,371] [views:debug,2014-08-19T16:55:37.554,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/977. Updated state: replica (0) [views:debug,2014-08-19T16:55:37.554,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/522. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:37.554,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",977,replica,0} [ns_server:debug,2014-08-19T16:55:37.554,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",522,active,0} [ns_server:debug,2014-08-19T16:55:37.662,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 154. Nacking mccouch update. [views:debug,2014-08-19T16:55:37.662,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/154. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:37.662,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",154,replica,0} [ns_server:debug,2014-08-19T16:55:37.662,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,944,386,1008,748,684,620, 556,982,424,360,722,658,594,530,164,988,956,398,366,1020,760,728,696,664,632, 600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410,378, 346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006,746,714, 682,650,618,586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592, 560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960, 402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642, 610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678, 614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562] [views:debug,2014-08-19T16:55:37.713,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/154. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:37.713,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",154,replica,0} [ns_server:debug,2014-08-19T16:55:37.796,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 520. Nacking mccouch update. [views:debug,2014-08-19T16:55:37.796,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/520. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:37.796,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 975. Nacking mccouch update. 
[ns_server:debug,2014-08-19T16:55:37.796,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",520,active,0} [views:debug,2014-08-19T16:55:37.796,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/975. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:37.796,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",975,replica,0} [ns_server:debug,2014-08-19T16:55:37.797,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762, 698,634,570,140,1009,996,374,983,736,672,608,544,114,970,412,348,88,710,646, 582,518,152,1021,944,386,1008,995,748,684,620,556,126,982,424,360,722,658, 594,530,164,100,956,398,1020,760,696,632,568,138,1007,994,372,981,734,670, 606,542,112,968,410,346,86,708,644,580,516,150,1019,942,384,1006,993,746,682, 618,554,124,980,422,358,98,720,656,592,528,162,954,396,1018,758,694,630,566, 136,1005,992,960,402,370,979,764,732,700,668,636,604,572,540,142,110,1011, 998,966,408,376,344,985,738,706,674,642,610,578,546,514,148,116,1017,972,940, 414,382,350,1004,991,90,744,712,680,648,616,584,552,520,154,122,1023,978,946, 420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692,628,564,134, 1003,990,368,977,730,666,602,538,108,964,406,342,704,640,576,512,146,1015, 938,380,1002,989,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950, 392,1014,754,690,626,562,132,1001,988,366,975,728,664,600,536,170,106,962, 404,766,702,638,574,144,1013,378,1000,987,740,676,612,548,118,974,416,352,92, 714,650,586,522,156,948,390,1012,999,752,688,624,560,130,986,364,726,662,598, 534,168,104] [ns_server:debug,2014-08-19T16:55:37.797,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,697,569,1008,748,620,982,671,543,722,594, 411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721,593, 955,644,695,567,1006,746,618,980,733,669,605,541,111,967,720,656,592,528,409, 345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745,681, 617,553,123,979,732,668,604,540,421,357,97,966,719,655,591,527,161,953,706, 642,578,395,1017,940,757,693,629,565,135,1004,991,744,680,616,552,369,978, 731,667,603,539,109,965,718,654,590,526,407,343,952,705,641,577,513,147,1016, 939,756,692,628,564,381,1003,990,743,679,615,551,121,977,730,666,602,538,419, 355,964,95,717,653,589,525,159,951,704,640,576,393,1015,938,755,691,627,563, 133,1002,989,742,678,614,550,367,976,729,665,601,537,107,963,716,652,588,524, 405,950,767,703,639,575,145,1014,754,690,626,562,379,1001,988,741,677,613, 549,119,975,728,664,600,536,417,353,962,93,715,651,587,523,157,949,766,702, 638,574,391,1013,753,689,625,561,131,1000,987,740,676,612,548,365,974,727, 663,599,535,169,105,961,714,650,586,522,403,948,765,701,637,573,143,1012,999, 752,688,624,560,377,986,739,675,611,547,117,973,726,662,598,534,415,351,960, 91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623,559,129, 985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648,584,520,401, 1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971,660,532, 
349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531,165,710, 582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,530,347,87, 709,581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163,708,580,397, 1019,942,759,631,137,993,682,554,371] [views:debug,2014-08-19T16:55:37.897,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/975. Updated state: replica (0) [views:debug,2014-08-19T16:55:37.897,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/520. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:37.897,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",975,replica,0} [ns_server:debug,2014-08-19T16:55:37.897,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",520,active,0} [ns_server:debug,2014-08-19T16:55:37.928,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 152. Nacking mccouch update. [views:debug,2014-08-19T16:55:37.928,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/152. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:37.928,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",152,replica,0} [ns_server:debug,2014-08-19T16:55:37.929,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748,684, 620,556,982,424,360,722,658,594,530,164,988,956,398,366,1020,760,728,696,664, 632,600,568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410, 378,346,1000,740,708,676,644,612,580,548,516,974,942,416,384,352,1006,746, 714,682,650,618,586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624, 592,560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992, 960,402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674, 642,610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 154,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692, 628,564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742, 678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562] [views:debug,2014-08-19T16:55:38.033,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/152. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:38.033,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",152,replica,0} [ns_server:debug,2014-08-19T16:55:38.156,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 518. Nacking mccouch update. [views:debug,2014-08-19T16:55:38.156,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/518. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:38.157,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",518,active,0} [ns_server:debug,2014-08-19T16:55:38.158,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,518,697,569,1008,748,620,982,671,543,722, 594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721, 593,955,644,695,567,1006,746,618,980,733,669,605,541,111,967,720,656,592,528, 409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005,992,745, 681,617,553,123,979,732,668,604,540,421,357,97,966,719,655,591,527,161,953, 706,642,578,395,1017,940,757,693,629,565,135,1004,991,744,680,616,552,369, 978,731,667,603,539,109,965,718,654,590,526,407,343,952,705,641,577,513,147, 1016,939,756,692,628,564,381,1003,990,743,679,615,551,121,977,730,666,602, 538,419,355,964,95,717,653,589,525,159,951,704,640,576,393,1015,938,755,691, 627,563,133,1002,989,742,678,614,550,367,976,729,665,601,537,107,963,716,652, 588,524,405,950,767,703,639,575,145,1014,754,690,626,562,379,1001,988,741, 677,613,549,119,975,728,664,600,536,417,353,962,93,715,651,587,523,157,949, 766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676,612,548,365, 974,727,663,599,535,169,105,961,714,650,586,522,403,948,765,701,637,573,143, 1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598,534,415, 351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687,623, 559,129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648,584, 520,401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115,971, 660,532,349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659,531, 165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658,530, 347,87,709,581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163,708, 580,397,1019,942,759,631,137,993,682,554,371] [ns_server:debug,2014-08-19T16:55:38.179,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 973. Nacking mccouch update. [views:debug,2014-08-19T16:55:38.179,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/973. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:38.179,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",973,replica,0} [ns_server:debug,2014-08-19T16:55:38.180,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,724,660,596,532,166,102,958,400,1022,762, 698,634,570,140,1009,996,374,983,736,672,608,544,114,970,412,348,88,710,646, 582,518,152,1021,944,386,1008,995,748,684,620,556,126,982,424,360,722,658, 594,530,164,100,956,398,1020,760,696,632,568,138,1007,994,372,981,734,670, 606,542,112,968,410,346,86,708,644,580,516,150,1019,942,384,1006,993,746,682, 618,554,124,980,422,358,98,720,656,592,528,162,954,396,1018,758,694,630,566, 136,1005,992,960,402,370,979,764,732,700,668,636,604,572,540,142,110,1011, 998,966,408,376,344,985,738,706,674,642,610,578,546,514,148,116,1017,972,940, 414,382,350,1004,991,90,744,712,680,648,616,584,552,520,154,122,1023,978,946, 420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692,628,564,134, 1003,990,368,977,730,666,602,538,108,964,406,342,704,640,576,512,146,1015, 938,380,1002,989,742,678,614,550,120,976,418,354,94,716,652,588,524,158,950, 392,1014,754,690,626,562,132,1001,988,366,975,728,664,600,536,170,106,962, 404,766,702,638,574,144,1013,378,1000,987,740,676,612,548,118,974,416,352,92, 714,650,586,522,156,948,390,1012,999,752,688,624,560,130,986,364,973,726,662, 598,534,168,104] [views:debug,2014-08-19T16:55:38.321,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/518. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:38.321,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",518,active,0} [ns_server:debug,2014-08-19T16:55:38.354,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 150. Nacking mccouch update. [views:debug,2014-08-19T16:55:38.354,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/150. Updated state: replica (0) [views:debug,2014-08-19T16:55:38.355,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/973. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:38.355,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",973,replica,0} [ns_server:debug,2014-08-19T16:55:38.355,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",150,replica,0} [ns_server:debug,2014-08-19T16:55:38.355,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748,684, 620,556,982,424,360,722,658,594,530,164,956,398,1020,760,728,696,664,632,600, 568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410,378,346, 1000,740,708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714, 682,650,618,586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592, 560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960, 402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642, 610,578,546,514,972,940,414,382,350,1004,744,712,680,648,616,584,552,520,154, 1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692,628, 564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742,678, 614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562,988,366] [views:debug,2014-08-19T16:55:38.500,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/150. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:38.500,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",150,replica,0} [ns_server:debug,2014-08-19T16:55:38.664,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 971. Nacking mccouch update. [ns_server:debug,2014-08-19T16:55:38.664,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 516. Nacking mccouch update. [views:debug,2014-08-19T16:55:38.665,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/516. Updated state: active (0) [views:debug,2014-08-19T16:55:38.665,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/971. 
Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:38.665,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",516,active,0} [ns_server:debug,2014-08-19T16:55:38.665,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",971,replica,0} [ns_server:debug,2014-08-19T16:55:38.666,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_tiles<0.4349.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [997,750,686,622,558,128,984,426,362,971,724,660,596,532,166,102,958,400,1022, 762,698,634,570,140,1009,996,374,983,736,672,608,544,114,970,412,348,88,710, 646,582,518,152,1021,944,386,1008,995,748,684,620,556,126,982,424,360,722, 658,594,530,164,100,956,398,1020,760,696,632,568,138,1007,994,372,981,734, 670,606,542,112,968,410,346,86,708,644,580,516,150,1019,942,384,1006,993,746, 682,618,554,124,980,422,358,98,720,656,592,528,162,954,396,1018,758,694,630, 566,136,1005,992,960,402,370,979,764,732,700,668,636,604,572,540,142,110, 1011,998,966,408,376,344,985,738,706,674,642,610,578,546,514,148,116,1017, 972,940,414,382,350,1004,991,90,744,712,680,648,616,584,552,520,154,122,1023, 978,946,420,388,356,1010,96,718,654,590,526,160,952,394,1016,756,692,628,564, 134,1003,990,368,977,730,666,602,538,108,964,406,342,704,640,576,512,146, 1015,938,380,1002,989,742,678,614,550,120,976,418,354,94,716,652,588,524,158, 950,392,1014,754,690,626,562,132,1001,988,366,975,728,664,600,536,170,106, 962,404,766,702,638,574,144,1013,378,1000,987,740,676,612,548,118,974,416, 352,92,714,650,586,522,156,948,390,1012,999,752,688,624,560,130,986,364,973, 726,662,598,534,168,104] [ns_server:debug,2014-08-19T16:55:38.666,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,518,697,569,1008,748,620,982,671,543,722, 594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721, 593,955,644,516,695,567,1006,746,618,980,733,669,605,541,111,967,720,656,592, 528,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005,992, 745,681,617,553,123,979,732,668,604,540,421,357,97,966,719,655,591,527,161, 953,706,642,578,395,1017,940,757,693,629,565,135,1004,991,744,680,616,552, 369,978,731,667,603,539,109,965,718,654,590,526,407,343,952,705,641,577,513, 147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,121,977,730,666, 602,538,419,355,964,95,717,653,589,525,159,951,704,640,576,393,1015,938,755, 691,627,563,133,1002,989,742,678,614,550,367,976,729,665,601,537,107,963,716, 652,588,524,405,950,767,703,639,575,145,1014,754,690,626,562,379,1001,988, 741,677,613,549,119,975,728,664,600,536,417,353,962,93,715,651,587,523,157, 949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676,612,548, 365,974,727,663,599,535,169,105,961,714,650,586,522,403,948,765,701,637,573, 143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598,534, 415,351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751,687, 623,559,129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712,648, 584,520,401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609,115, 971,660,532,349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970,659, 531,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969,658, 
530,347,87,709,581,1020,943,760,632,994,683,555,734,606,423,968,657,529,163, 708,580,397,1019,942,759,631,137,993,682,554,371] [ns_server:debug,2014-08-19T16:55:38.806,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 148. Nacking mccouch update. [views:debug,2014-08-19T16:55:38.807,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/148. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:38.807,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",148,replica,0} [ns_server:debug,2014-08-19T16:55:38.807,ns_1@10.242.238.90:capi_set_view_manager-tiles<0.6184.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,426,362,724,660,596,532,166,958,400,1022,762,698,634,570, 996,374,736,672,608,544,970,412,348,710,646,582,518,152,944,386,1008,748,684, 620,556,982,424,360,722,658,594,530,164,956,398,1020,760,728,696,664,632,600, 568,536,170,994,962,404,372,766,734,702,670,638,606,574,542,968,410,378,346, 1000,740,708,676,644,612,580,548,516,150,974,942,416,384,352,1006,746,714, 682,650,618,586,554,522,156,980,948,422,390,358,1012,752,720,688,656,624,592, 560,528,162,986,954,396,364,1018,758,726,694,662,630,598,566,534,168,992,960, 402,370,764,732,700,668,636,604,572,540,998,966,408,376,344,738,706,674,642, 610,578,546,514,148,972,940,414,382,350,1004,744,712,680,648,616,584,552,520, 154,1023,978,946,420,388,356,1010,718,654,590,526,160,952,394,1016,756,692, 628,564,990,368,730,666,602,538,964,406,342,704,640,576,512,938,380,1002,742, 678,614,550,976,418,354,716,652,588,524,158,950,392,1014,754,690,626,562,988, 366] [views:debug,2014-08-19T16:55:38.824,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/971. Updated state: replica (0) [views:debug,2014-08-19T16:55:38.824,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/516. Updated state: active (0) [ns_server:debug,2014-08-19T16:55:38.824,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",971,replica,0} [ns_server:debug,2014-08-19T16:55:38.824,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",516,active,0} [views:debug,2014-08-19T16:55:38.942,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for tiles/148. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:38.942,ns_1@10.242.238.90:<0.6225.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"tiles",148,replica,0} [ns_server:debug,2014-08-19T16:55:39.090,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 514. Nacking mccouch update. [views:debug,2014-08-19T16:55:39.090,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_metahash/514. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:55:39.090,ns_1@10.242.238.90:<0.2222.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_metahash",514,active,0} [ns_server:debug,2014-08-19T16:55:39.091,ns_1@10.242.238.90:capi_set_view_manager-maps_1_8_metahash<0.2194.1>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,673,545,724,596,413,958,647,519,153,698,570,387,1009,749,621,127, 983,672,544,361,723,595,101,957,646,518,697,569,1008,748,620,982,671,543,722, 594,411,956,645,517,151,696,568,385,1007,747,619,125,981,670,542,359,99,721, 593,955,644,516,695,567,1006,746,618,980,733,669,605,541,111,967,720,656,592, 528,409,345,954,707,643,579,515,149,1018,941,758,694,630,566,383,1005,992, 745,681,617,553,123,979,732,668,604,540,421,357,97,966,719,655,591,527,161, 953,706,642,578,514,395,1017,940,757,693,629,565,135,1004,991,744,680,616, 552,369,978,731,667,603,539,109,965,718,654,590,526,407,343,952,705,641,577, 513,147,1016,939,756,692,628,564,381,1003,990,743,679,615,551,121,977,730, 666,602,538,419,355,964,95,717,653,589,525,159,951,704,640,576,393,1015,938, 755,691,627,563,133,1002,989,742,678,614,550,367,976,729,665,601,537,107,963, 716,652,588,524,405,950,767,703,639,575,145,1014,754,690,626,562,379,1001, 988,741,677,613,549,119,975,728,664,600,536,417,353,962,93,715,651,587,523, 157,949,766,702,638,574,391,1013,753,689,625,561,131,1000,987,740,676,612, 548,365,974,727,663,599,535,169,105,961,714,650,586,522,403,948,765,701,637, 573,143,1012,999,752,688,624,560,377,986,739,675,611,547,117,973,726,662,598, 534,415,351,960,91,713,649,585,521,155,947,764,700,636,572,389,1011,998,751, 687,623,559,129,985,738,674,610,546,363,972,725,661,597,533,167,103,959,712, 648,584,520,401,1023,946,763,699,635,571,141,1010,997,686,558,375,737,609, 115,971,660,532,349,89,711,583,1022,945,762,634,996,685,557,736,608,425,970, 659,531,165,710,582,399,1021,944,761,633,139,995,684,556,373,735,607,113,969, 658,530,347,87,709,581,1020,943,760,632,994,683,555,734,606,423,968,657,529, 163,708,580,397,1019,942,759,631,137,993,682,554,371] [ns_server:debug,2014-08-19T16:55:39.124,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 969. Nacking mccouch update. [views:debug,2014-08-19T16:55:39.124,ns_1@10.242.238.90:mc_couch_events<0.17533.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for maps_1_8_tiles/969. Updated state: replica (0) [ns_server:debug,2014-08-19T16:55:39.124,ns_1@10.242.238.90:<0.4377.1>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"maps_1_8_tiles",969,replica,0}
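The same three-step pattern appears from each mc_connection process (<0.2222.1>, <0.4377.1>, <0.6225.1>): add the _local/vbuuid document for the vbucket, nack the mccouch update, then signal the corresponding mc_couch_event. The sketch below is a hypothetical outline of that sequence, not the real mc_connection module: the module and helper names are invented stubs, mc_couch_events is the registered name shown in the log, and treating it as a gen_event manager here is an assumption.

    -module(mc_notify_sketch).
    -export([notify_vbucket_update/4]).

    notify_vbucket_update(Bucket, VBucket, State, CkptId) ->
        ok = add_local_vbuuid_doc(Bucket, VBucket),     %% "Added _local/vbuuid document into vb: N"
        ok = nack_mccouch_update(VBucket),              %% "Nacking mccouch update."
        Event = {set_vbucket, Bucket, VBucket, State, CkptId},
        catch gen_event:notify(mc_couch_events, Event), %% "Signaled mc_couch_event: ..."
        Event.

    %% Stubs standing in for work this log does not show (assumptions).
    add_local_vbuuid_doc(_Bucket, _VBucket) -> ok.
    nack_mccouch_update(_VBucket) -> ok.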